Posted to commits@ambari.apache.org by ni...@apache.org on 2017/01/03 11:43:06 UTC

[01/20] ambari git commit: AMBARI-19321 : Hive View 2.0 - Minimal view for Hive which includes new UI changes. Also made changes in poms as required (nitirajrathore)

Repository: ambari
Updated Branches:
  refs/heads/trunk 672fdf06b -> 853a1ce78


http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserJSONTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserJSONTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserJSONTest.java
new file mode 100644
index 0000000..a15e5d4
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserJSONTest.java
@@ -0,0 +1,263 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.upload;
+
+import org.apache.ambari.view.hive20.client.ColumnDescription;
+import org.apache.ambari.view.hive20.client.ColumnDescriptionShort;
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.DataParser;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.PreviewData;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+
+public class DataParserJSONTest {
+
+  @Test
+  public void testParsePreviewJSON() throws Exception {
+    String str = "[ {\"col1\" : \"a\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"4.4\" },"
+            + "{\"col1\": \"b\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"5.4\" },"
+            + "{\"col1\": \"c\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"6.4\" },"
+            + "{\"col1\": \"d\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"7.4\" },"
+            + "{\"col1\": \"e\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"8.4\" },"
+            + "{\"col1\": \"f\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"9.4\" },"
+            + "{\"col1\": \"g\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"10.4\" },"
+            + "{\"col1\": \"h\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"11.4\" },"
+            + "{\"col1\": \"i\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"4\" },"
+            + "{\"col1\": \"j\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"5\" },"
+            + "{\"col1\": \"k\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"6\" },"
+            + "{\"col1\": \"l\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"7\" },"
+            + "{\"col1\": \"m\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"24.4\" },"
+            + "{\"col1\": \"n\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"14.4\" },"
+            + "{\"col1\": \"o\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"34.4\" },"
+            + "{\"col1\": \"p\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"44.4\" },"
+            + "{\"col1\": \"q\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"54.4\" },"
+            + "{\"col1\": \"r\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"64.4\" }"
+            + "]";
+
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_NUMBER_OF_PREVIEW_ROWS, 7);
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp =  new DataParser(sr, parseOptions)
+    ) {
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(7, pd.getPreviewRows().size()); // header row + preview rows
+      Assert.assertEquals(14, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.CHAR.toString(), 0),
+              new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1),
+              new ColumnDescriptionImpl("col3", ColumnDescriptionShort.DataTypes.STRING.toString(), 2),
+              new ColumnDescriptionImpl("col4", ColumnDescriptionShort.DataTypes.STRING.toString(), 3),
+              new ColumnDescriptionImpl("col5", ColumnDescriptionShort.DataTypes.STRING.toString(), 4),
+              new ColumnDescriptionImpl("col6", ColumnDescriptionShort.DataTypes.STRING.toString(), 5),
+              new ColumnDescriptionImpl("col7", ColumnDescriptionShort.DataTypes.STRING.toString(), 6),
+              new ColumnDescriptionImpl("col8", ColumnDescriptionShort.DataTypes.STRING.toString(), 7),
+              new ColumnDescriptionImpl("col9", ColumnDescriptionShort.DataTypes.STRING.toString(), 8),
+              new ColumnDescriptionImpl("col10", ColumnDescriptionShort.DataTypes.STRING.toString(), 9),
+              new ColumnDescriptionImpl("col11", ColumnDescriptionShort.DataTypes.STRING.toString(), 10),
+              new ColumnDescriptionImpl("col12", ColumnDescriptionShort.DataTypes.STRING.toString(), 11),
+              new ColumnDescriptionImpl("col13", ColumnDescriptionShort.DataTypes.STRING.toString(), 12),
+              new ColumnDescriptionImpl("col14", ColumnDescriptionShort.DataTypes.DOUBLE.toString(), 13)};
+
+      Row row2 = new Row(new Object[]{"a", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "4.4"});
+      Row row3 = new Row(new Object[]{"b", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "5.4"});
+      Row row4 = new Row(new Object[]{"c", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "6.4"});
+      Row row5 = new Row(new Object[]{"d", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "7.4"});
+      Row row6 = new Row(new Object[]{"e", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "8.4"});
+      Row row7 = new Row(new Object[]{"f", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "9.4"});
+      Row row8 = new Row(new Object[]{"g", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "10.4"});
+
+      Row[] rows = { row2, row3, row4, row5, row6, row7, row8};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    }
+  }
+
+  /**
+   * additional columns in rows of JSON are ignored.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewCSVMoreColumns() throws Exception {
+    String str = "[ {\"col1\" : \"a\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"4.4\" },"
+            + "{\"col1\": \"b\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" , \"col14\" : \"43.4\" ,\"col15\" : \"asafsfa\" },"
+            + "{\"col1\": \"c\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"6.4\" },"
+            + "{\"col1\": \"d\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"7.4\" }"
+            + "]";
+
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp =  new DataParser(sr, parseOptions)
+    ) {
+
+      PreviewData pd = dp.parsePreview();
+
+      Row row2 = new Row(new Object[]{"b", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "43.4"});
+      Assert.assertArrayEquals("More number of columns do not give correct result.", row2.getRow(), pd.getPreviewRows().get(1).getRow());
+    }
+  }
+
+  /**
+   * fewer columns in json leave the missing ones null.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewCSVLessColumns() throws Exception {
+    String str = "[ " +
+            "{\"col1\" : \"a\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"4.4\" },"
+            + "{\"col1\": \"b\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\"  },"
+            + "{\"col1\": \"c\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"6.4\" },"
+            + "{\"col1\": \"d\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"7.4\" }"
+            + "]";
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp =  new DataParser(sr, parseOptions)
+    ) {
+      PreviewData pd = dp.parsePreview();
+
+      Assert.assertNull(pd.getPreviewRows().get(1).getRow()[13]);
+    }
+  }
+
+  /**
+   * illegal json format gives an error
+   *
+   * @throws IOException
+   */
+  @Test(expected = IllegalArgumentException.class)
+  public void testWrongJsonFormat() throws Exception {
+    String str = "[ " +
+            "{\"col1\" : \"a\", \n\"col2\": \"abcd\" },"
+            + "{\"col1\": \"b\", \n\"col2\": \"abcd\" },"
+            + "{\"col1\": \"c\", \n\"col2\": \"abcd\"  },"
+            + "{\"col1\": \"d\",, \n\"col2\": \"abcd\"  }"       // extra comma in this line
+            + "]";
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions);
+    ) {
+      PreviewData pd = dp.parsePreview();
+    }
+  }
+
+  /**
+   * One row JSON will give embedded column names and 1st row in preview if HEADER.EMBEDDED is selected
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreview1RowJSON() throws Exception {
+    String str = "[ "
+      + "{\"col1\": \"d\", \n\"col2\": \"abcd\"  }"       // extra comma in this line
+      + "]";
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions);
+    ) {
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(1, pd.getPreviewRows().size());
+      Assert.assertEquals(2, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.CHAR.toString(), 0),
+        new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1)};
+
+      Object cols1[] = new Object[2];
+      cols1[0] = "d";
+      cols1[1] = "abcd";
+      Row row1 = new Row(cols1);
+
+      Row[] rows = {row1};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    }
+  }
+
+  /**
+   * One row JSON will give default column names and 1st row in preview if HEADER.PROVIDED_BY_USER is selected
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreview1RowJSONHeaderProvided() throws Exception {
+    String str = "[ "
+      + "{\"col1\": \"d\", \n\"col2\": \"abcd\"  }"       // extra comma in this line
+      + "]";
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.PROVIDED_BY_USER.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions);
+    ) {
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(1, pd.getPreviewRows().size());
+      Assert.assertEquals(2, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("column1", ColumnDescriptionShort.DataTypes.CHAR.toString(), 0),
+        new ColumnDescriptionImpl("column2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1)};
+
+      Object cols1[] = new Object[2];
+      cols1[0] = "d";
+      cols1[1] = "abcd";
+      Row row1 = new Row(cols1);
+
+      Row[] rows = {row1};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    }
+  }
+}
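
For quick reference, the preview flow these tests exercise reduces to the sketch
below. It assumes the DataParser/ParseOptions/PreviewData API added elsewhere in
this commit; the class name, sample input and printing loop are illustrative only.

  import java.io.StringReader;
  import java.util.Arrays;

  import org.apache.ambari.view.hive20.client.Row;
  import org.apache.ambari.view.hive20.resources.uploads.parsers.DataParser;
  import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseOptions;
  import org.apache.ambari.view.hive20.resources.uploads.parsers.PreviewData;

  public class JsonPreviewSketch {
    public static void main(String[] args) throws Exception {
      ParseOptions options = new ParseOptions();
      options.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
      options.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString());
      options.setOption(ParseOptions.OPTIONS_NUMBER_OF_PREVIEW_ROWS, 5);

      try (StringReader reader = new StringReader("[ {\"col1\": \"a\", \"col2\": \"4.4\"} ]");
           DataParser parser = new DataParser(reader, options)) {
        PreviewData preview = parser.parsePreview();
        // column names come from the JSON keys; types (CHAR, STRING, DOUBLE, ...) are inferred from the values
        System.out.println("columns: " + preview.getHeader());
        for (Row row : preview.getPreviewRows()) {
          System.out.println(Arrays.toString(row.getRow()));
        }
      }
    }
  }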

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserXMLTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserXMLTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserXMLTest.java
new file mode 100644
index 0000000..07e7c7c
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserXMLTest.java
@@ -0,0 +1,295 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.upload;
+
+import org.apache.ambari.view.hive20.client.ColumnDescription;
+import org.apache.ambari.view.hive20.client.ColumnDescriptionShort;
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.DataParser;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.PreviewData;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+
+public class DataParserXMLTest {
+
+  @Test
+  public void testParsePreviewXML() throws Exception {
+    String str = "<table>" +
+            "<row>" +
+            "<col name=\"col1\">row1-col1-Value</col>" +
+            "<col name=\"col2\">row1-col2-Value</col>" +
+            "<col name=\"col3\">row1-col3-Value</col>" +
+            "<col name=\"col4\">10</col>" +
+            "<col name=\"col5\">11</col>" +
+            "</row>" +
+            "<row>" +
+            "<col name=\"col1\">row2-col1-Value</col>" +
+            "<col name=\"col2\">row2-col2-Value</col>" +
+            "<col name=\"col3\">row2-col3-Value</col>" +
+            "<col name=\"col4\">20</col>" +
+            "<col name=\"col5\">21</col>" +
+            "</row>" +
+            "</table>";
+
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions);
+      ) {
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(2, pd.getPreviewRows().size()); // header row + preview rows
+      Assert.assertEquals(5, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.STRING.toString(), 0),
+              new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1),
+              new ColumnDescriptionImpl("col3", ColumnDescriptionShort.DataTypes.STRING.toString(), 2),
+              new ColumnDescriptionImpl("col4", ColumnDescriptionShort.DataTypes.INT.toString(), 3),
+              new ColumnDescriptionImpl("col5", ColumnDescriptionShort.DataTypes.INT.toString(), 4)
+      };
+
+      Row row2 = new Row(new Object[]{"row1-col1-Value", "row1-col2-Value", "row1-col3-Value", "10", "11"});
+      Row row3 = new Row(new Object[]{"row2-col1-Value", "row2-col2-Value", "row2-col3-Value", "20", "21"});
+
+      Row[] rows = {row2, row3};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    }
+  }
+
+
+  /**
+   * additional columns in rows of XML are ignored.
+   * the number of columns is decided by the first row of the table and here the second row contains more columns so those are ignored.
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewCSVMoreColumns() throws Exception {
+    String str ="<table>" +
+            "<row>" +
+            "<col name=\"col1\">row1-col1-Value</col>" +
+            "<col name=\"col2\">row1-col2-Value</col>" +
+            "<col name=\"col3\">row1-col3-Value</col>" +
+            "<col name=\"col4\">10</col>" +
+            "<col name=\"col5\">11</col>" +
+            "</row>" +
+            "<row>" +
+            "<col name=\"col1\">row2-col1-Value</col>" +
+            "<col name=\"col2\">row2-col2-Value</col>" +
+            "<col name=\"col3\">row2-col3-Value</col>" +
+            "<col name=\"col99\">row2-col99-Value</col>" +  // extra colummn
+            "<col name=\"col100\">row2-col100-Value</col>" +  // extra column
+            "<col name=\"col4\">20</col>" +
+            "<col name=\"col5\">21</col>" +
+            "</row>" +
+            "</table>";
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString());
+
+
+    try(    StringReader sr = new StringReader(str);
+            DataParser dp = new DataParser(sr, parseOptions);
+    ) {
+
+
+      PreviewData pd = dp.parsePreview();
+
+      Row row2 = new Row(new Object[]{"row2-col1-Value","row2-col2-Value","row2-col3-Value","20","21"});
+      Assert.assertArrayEquals("More number of columns do not give correct result.", row2.getRow(), pd.getPreviewRows().get(1).getRow());
+    }
+  }
+
+  /**
+   * fewer columns in xml leave the missing ones null.
+   * the number of columns is decided by the first row of the table and here the second row does not contain col99 and col100
+   * columns so those are set to null.
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewCSVLessColumns() throws Exception {
+    String str = "<table>" +
+            "<row>" +
+            "<col name=\"col1\">row1-col1-Value</col>" +
+            "<col name=\"col2\">row1-col2-Value</col>" +
+            "<col name=\"col3\">row1-col3-Value</col>" +
+            "<col name=\"col99\">row2-col99-Value</col>" +  // extra colummn
+            "<col name=\"col100\">row2-col100-Value</col>" +  // extra column
+            "<col name=\"col4\">10</col>" +
+            "<col name=\"col5\">11</col>" +
+            "</row>" +
+            "<row>" +
+            "<col name=\"col1\">row2-col1-Value</col>" +
+            "<col name=\"col2\">row2-col2-Value</col>" +
+            "<col name=\"col3\">row2-col3-Value</col>" +
+            "<col name=\"col4\">20</col>" +
+            "<col name=\"col5\">21</col>" +
+            "</row>" +
+            "</table>";
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions);
+      ) {
+      PreviewData pd = dp.parsePreview();
+
+      Row row2 = new Row(new Object[]{"row2-col1-Value","row2-col2-Value","row2-col3-Value",null,null,"20","21"});
+      Assert.assertArrayEquals("Less number of columns do not give correct result.", row2.getRow(), pd.getPreviewRows().get(1).getRow());
+    }
+  }
+
+  /**
+   * illegal xml format gives an error. adding an illegal tag gives an error
+   *
+   * @throws IOException
+   */
+  @Test(expected = IllegalArgumentException.class)
+  public void testWrongXMLFormat() throws Exception {
+    String str = "<table>" +
+            "<row>" +
+            "<ccc></ccc>" +   // illegal tag.
+            "<col name=\"col1\">row1-col1-Value</col>" +
+            "<col name=\"col2\">row1-col2-Value</col>" +
+            "<col name=\"col3\">row1-col3-Value</col>" +
+            "<col name=\"col99\">row2-col99-Value</col>" +  // extra colummn
+            "<col name=\"col100\">row2-col100-Value</col>" +  // extra column
+            "<col name=\"col4\">10</col>" +
+            "<col name=\"col5\">11</col>" +
+            "</row>" +
+            "<row>" +
+            "<col name=\"col1\">row2-col1-Value</col>" +
+            "<col name=\"col2\">row2-col2-Value</col>" +
+            "<col name=\"col3\">row2-col3-Value</col>" +
+            "<col name=\"col4\">20</col>" +
+            "<col name=\"col5\">21</col>" +
+            "</row>" +
+            "</table>";
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser  dp = new DataParser(sr, parseOptions);
+      ) {
+      PreviewData pd = dp.parsePreview();
+    }
+  }
+
+  /**
+   * One row XML will give embedded column names and 1st row in preview if HEADER.EMBEDDED is selected
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreview1RowXML() throws Exception {
+    String str = "<table>" +
+                      "<row>" +
+                      "<col name=\"col1\">row1-col1-Value</col>" +
+                      "<col name=\"col2\">11</col>" +
+                      "</row>" +
+                 "</table>";
+
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions);
+      ) {
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(1, pd.getPreviewRows().size());
+      Assert.assertEquals(2, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.STRING.toString(), 0),
+        new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.INT.toString(), 1)};
+
+      Object cols1[] = new Object[2];
+      cols1[0] = "row1-col1-Value";
+      cols1[1] = "11";
+      Row row1 = new Row(cols1);
+
+      Row[] rows = {row1};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    }
+  }
+
+  /**
+   * One row XML will give default column names and 1st row in preview if HEADER.PROVIDED_BY_USER is selected
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreview1RowXMLHeaderProvided() throws Exception {
+    String str = "<table>" +
+                    "<row>" +
+                    "<col name=\"col1\">row1-col1-Value</col>" +
+                    "<col name=\"col2\">11</col>" +
+                    "</row>" +
+                 "</table>";
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.PROVIDED_BY_USER.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions)
+      ) {
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(1, pd.getPreviewRows().size());
+      Assert.assertEquals(2, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("column1", ColumnDescriptionShort.DataTypes.STRING.toString(), 0),
+        new ColumnDescriptionImpl("column2", ColumnDescriptionShort.DataTypes.INT.toString(), 1)};
+
+      Object cols1[] = new Object[2];
+      cols1[0] = "row1-col1-Value";
+      cols1[1] = "11";
+      Row row1 = new Row(cols1);
+
+      Row[] rows = {row1};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    }
+  }
+}
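
The XML path mirrors the JSON one: only the file-type option and the input shape
change. A rough fragment, reusing the imports from the JSON sketch above (the
input string and option values are illustrative, not taken from the view code):

  String xml = "<table>"
      + "<row><col name=\"col1\">a</col><col name=\"col2\">10</col></row>"
      + "<row><col name=\"col1\">b</col><col name=\"col2\">20</col></row>"
      + "</table>";

  ParseOptions options = new ParseOptions();
  options.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString());
  options.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());

  try (StringReader reader = new StringReader(xml);
       DataParser parser = new DataParser(reader, options)) {
    // column names come from the col "name" attributes of the first <row>;
    // extra columns in later rows are dropped, missing ones become null
    PreviewData preview = parser.parsePreview();
  }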

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/JsonParserTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/JsonParserTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/JsonParserTest.java
new file mode 100644
index 0000000..f775ea5
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/JsonParserTest.java
@@ -0,0 +1,146 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.upload;
+
+import com.google.gson.JsonArray;
+import com.google.gson.JsonObject;
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.json.JSONParser;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.Iterator;
+
+public class JsonParserTest {
+
+  @Test(expected = IOException.class)
+  public void testEmptyStream() throws Exception {
+    String json = "";
+
+    try(
+      StringReader sr = new StringReader(json);
+      JSONParser jp =  new JSONParser(sr, null);
+    ) {
+      // PARSING WILL THROW ERROR
+    }
+  }
+
+  @Test
+  public void testEmptyRow() throws Exception {
+    JsonObject jo = new JsonObject();
+    JsonArray ja = new JsonArray();
+    ja.add(jo);
+    String json = ja.toString();
+
+    try(
+      StringReader sr = new StringReader(json);
+      JSONParser jp = new JSONParser(sr, null)
+      ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator should not be Empty", true, iterator.hasNext());
+      Assert.assertArrayEquals("Row should be empty",new Object[]{},iterator.next().getRow());
+    }
+  }
+
+
+  @Test
+  public void testEmptyTable() throws Exception {
+    JsonArray ja = new JsonArray();
+    String json = ja.toString();
+
+    try(
+      StringReader sr = new StringReader(json);
+      JSONParser jp =  new JSONParser(sr, null);
+    ) {
+      Iterator<Row> iterator = jp.iterator();
+      Assert.assertEquals("Iterator Empty!", false, iterator.hasNext());
+    }
+  }
+
+  @Test
+  public void testParse1Row() throws Exception {
+    JsonObject jo = new JsonObject();
+    jo.addProperty("key1","value1");
+    jo.addProperty("key2",'c');
+    jo.addProperty("key3",10);
+    jo.addProperty("key4",10.1);
+
+    JsonArray ja = new JsonArray();
+    ja.add(jo);
+    String json = ja.toString();
+
+    try(StringReader sr = new StringReader(json);
+
+        JSONParser jp  = new JSONParser(sr, null)
+    ) {
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator Empty!", true, iterator.hasNext());
+      Row row = iterator.next();
+      Row expected = new Row(new Object[]{"value1", "c", "10", "10.1"});
+      Assert.assertEquals("Row not equal!", expected, row);
+
+      Assert.assertEquals("Should report no more rows!", false, iterator.hasNext());
+    }
+  }
+
+  @Test
+  public void testParseMultipleRow() throws Exception {
+    JsonObject jo1 = new JsonObject();
+    jo1.addProperty("key1","value1");
+    jo1.addProperty("key2","c");
+    jo1.addProperty("key3","10");
+    jo1.addProperty("key4","10.1");
+
+    JsonObject jo2 = new JsonObject();
+    jo2.addProperty("key1","value2");
+    jo2.addProperty("key2","c2");
+    jo2.addProperty("key3","102");
+    jo2.addProperty("key4",true);
+
+
+    JsonArray ja = new JsonArray();
+    ja.add(jo1);
+    ja.add(jo2);
+
+    String json = ja.toString();
+
+
+
+    try(
+      StringReader sr = new StringReader(json);
+      JSONParser jp = new JSONParser(sr, null)
+    ) {
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", new Row(new Object[]{"value1", "c", "10", "10.1"}), iterator.next());
+
+      Assert.assertEquals("Failed to detect 2nd row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 2nd row!", new Row(new Object[]{"value2", "c2", "102", Boolean.TRUE.toString()}), iterator.next());
+
+      Assert.assertEquals("Failed to detect end of rows!", false, iterator.hasNext());
+      Assert.assertEquals("Failed to detect end of rows 2nd time!", false, iterator.hasNext());
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/OpenCSVParserTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/OpenCSVParserTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/OpenCSVParserTest.java
new file mode 100644
index 0000000..a55bf66
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/OpenCSVParserTest.java
@@ -0,0 +1,313 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.upload;
+
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.csv.opencsv.OpenCSVParser;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.Iterator;
+
+public class OpenCSVParserTest {
+
+  /**
+   * no exception in creating csvParser with empty stream
+   * @throws IOException
+   */
+  @Test
+  public void testEmptyStream() throws Exception {
+    String csv = "";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, new ParseOptions());
+      ) {
+      Assert.assertEquals("There should not be any rows.",false, jp.iterator().hasNext());
+    }
+  }
+
+  /**
+   * in case of csv an empty line is still considered as a row
+   * @throws IOException
+   */
+  @Test
+  public void testEmptyRow() throws Exception {
+    String csv = "       ";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, new ParseOptions());
+      ) {
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator should be Empty", true, iterator.hasNext());
+      Assert.assertArrayEquals("Row should not be empty",new Object[]{"       "},iterator.next().getRow());
+    }
+  }
+
+  @Test
+  public void testParse1Row() throws Exception {
+    String csv = "value1,c,10,10.1";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, new ParseOptions());
+      ) {
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator Empty!", true, iterator.hasNext());
+      Row row = iterator.next();
+      Row expected = new Row(new Object[]{"value1", "c", "10", "10.1"});
+      Assert.assertEquals("Row not equal!", expected, row);
+
+      Assert.assertEquals("Should report no more rows!", false, iterator.hasNext());
+    }
+  }
+
+  @Test
+  public void testParseMultipleRow() throws Exception {
+
+    String csv = "value1,c,10,10.1\n" +
+            "value2,c2,102,true";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, new ParseOptions());
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", new Row(new Object[]{"value1", "c", "10", "10.1"}), iterator.next());
+
+      Assert.assertEquals("Failed to detect 2nd row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 2nd row!", new Row(new Object[]{"value2", "c2", "102", Boolean.TRUE.toString()}), iterator.next());
+
+      Assert.assertEquals("Failed to detect end of rows!", false, iterator.hasNext());
+      Assert.assertEquals("Failed to detect end of rows 2nd time!", false, iterator.hasNext());
+    }
+  }
+
+  @Test
+  public void testQuotedAndEscapedEndline() throws Exception {
+
+    String csv = "\"row1-\ncol1\",1,1.1\n\"row2-\\\ncol1\",2,2.2\n";
+    ParseOptions po = new ParseOptions();
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Row row = new Row(new Object[]{"row1-\ncol1", "1", "1.1"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+
+      Row row2 = new Row(new Object[]{"row2-\ncol1", "2", "2.2"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row2, iterator.next());
+
+    }
+  }
+
+  @Test
+  public void testQuotedDoubleQuote() throws Exception {
+
+    String csv = "\"aaa\",\"b\"\"bb\",\"ccc\"";
+    ParseOptions po = new ParseOptions();
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Row row = new Row(new Object[]{"aaa", "b\"bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
+
+  @Test
+  public void testEscapedDoubleQuote() throws Exception {
+
+    String csv = "\"aaa\",\"b\\\"bb\",\"ccc\"";
+    ParseOptions po = new ParseOptions();
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Row row = new Row(new Object[]{"aaa", "b\"bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
+
+
+  @Test
+  public void testSpecialEscape() throws Exception {
+
+    String csv = "\"aaa\",\"b$\"bb\",\"ccc\"";
+    ParseOptions po = new ParseOptions();
+    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR,'$');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Row row = new Row(new Object[]{"aaa", "b\"bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
+
+  @Test
+  public void testSpecialEscapedEscape() throws Exception {
+
+    String csv = "aaa,b$$bb,ccc";
+    ParseOptions po = new ParseOptions();
+    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR,'$');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Row row = new Row(new Object[]{"aaa", "b$bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
+
+
+  @Test
+  public void testSpecialUnEscapedEscape() throws Exception {
+
+    String csv = "aaa,b$bb,ccc";
+    ParseOptions po = new ParseOptions();
+    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR,'$');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Row row = new Row(new Object[]{"aaa", "bbb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
+
+  @Test
+  public void test001Escape() throws Exception {
+
+    String csv = "aaa,b\001\"bb,ccc";
+    ParseOptions po = new ParseOptions();
+    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR,'\001');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+      Row row = new Row(new Object[]{"aaa", "b\"bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());    }
+  }
+
+  @Test
+  public void testSpecialQuote() throws Exception {
+
+    String csv = "\001aaa\001,\001b\001\001bb\001,\001ccc\001";
+    ParseOptions po = new ParseOptions();
+    po.setOption(ParseOptions.OPTIONS_CSV_QUOTE,'\001');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+      Row row = new Row(new Object[]{"aaa", "b\001bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
+
+  @Test
+  public void testSpaceAsDelimiterAndQuoted() throws Exception {
+
+    String csv = "aaa \"b bb\" ccc\naaa2 bbb2 \"c cc2\"";
+    ParseOptions po = new ParseOptions();
+//    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR,'\001');
+    po.setOption(ParseOptions.OPTIONS_CSV_DELIMITER,' ');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+      Row row = new Row(new Object[]{"aaa", "b bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+
+      Row row2 = new Row(new Object[]{"aaa2", "bbb2", "c cc2"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row2, iterator.next());
+    }
+  }
+
+  @Test
+  public void testFailedDelimiterEscaped() throws Exception {
+
+    String csv = "aaa,b\\,bb,ccc";
+    ParseOptions po = new ParseOptions();
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+      Row row = new Row(new Object[]{"aaa", "b","bb", "ccc"});   // different from Common CSVParser
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
+}
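
The delimiter, quote and escape characters are exercised one at a time above; they
are all plain ParseOptions entries and can be combined. A minimal sketch (only the
option keys and the OpenCSVParser API come from the tests; the combination, class
name and sample characters are illustrative):

  import java.io.StringReader;
  import java.util.Arrays;
  import java.util.Iterator;

  import org.apache.ambari.view.hive20.client.Row;
  import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseOptions;
  import org.apache.ambari.view.hive20.resources.uploads.parsers.csv.opencsv.OpenCSVParser;

  public class CsvOptionsSketch {
    public static void main(String[] args) throws Exception {
      ParseOptions options = new ParseOptions();
      options.setOption(ParseOptions.OPTIONS_CSV_DELIMITER, ' ');    // space instead of ','
      options.setOption(ParseOptions.OPTIONS_CSV_QUOTE, '\'');       // single quote instead of '"'
      options.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR, '$');  // '$' instead of '\\'

      try (StringReader reader = new StringReader("aaa 'b bb' c$$cc");
           OpenCSVParser parser = new OpenCSVParser(reader, options)) {
        Iterator<Row> rows = parser.iterator();
        while (rows.hasNext()) {
          System.out.println(Arrays.toString(rows.next().getRow()));
        }
      }
    }
  }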

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/OpenCSVTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/OpenCSVTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/OpenCSVTest.java
new file mode 100644
index 0000000..5dbb40d
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/OpenCSVTest.java
@@ -0,0 +1,245 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.upload;
+
+import com.opencsv.CSVParser;
+import com.opencsv.CSVReader;
+import com.opencsv.CSVWriter;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.io.StringWriter;
+
+public class OpenCSVTest {
+
+  /**
+   * no exception in creating csvParser with empty stream
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testEmptyStream() throws Exception {
+    String csv = "";
+
+    CSVParser jp = new CSVParser();
+    String[] columns = jp.parseLine(csv);
+    Assert.assertEquals("Should detect one column.", 1, columns.length);
+    Assert.assertEquals("Should detect one column with empty value.", new String[]{""}, columns);
+  }
+
+  /**
+   * in case of csv an empty line is still considered as a row
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testEmptyRow() throws Exception {
+    String csv = "       ";
+    CSVParser jp = new CSVParser();
+
+    String[] columns = jp.parseLine(csv);
+    Assert.assertEquals("One column not detected.", 1, columns.length);
+    Assert.assertArrayEquals("Row should not be empty", new String[]{"       "}, columns);
+  }
+
+  @Test
+  public void testParse1Row() throws Exception {
+    String csv = "value1,c,10,10.1";
+
+    String[] cols = csv.split(",");
+    CSVParser jp = new CSVParser();
+    String[] columns = jp.parseLine(csv);
+    Assert.assertEquals("4 columns not detect", 4, columns.length);
+    Assert.assertArrayEquals("Row not equal!", cols, columns);
+  }
+
+  @Test
+  public void testParseMultipleRow() throws Exception {
+
+    String csv = "value1,c,10,10.1\n" +
+      "value2,c2,102,true";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVReader csvReader = new CSVReader(sr,',','"','\\');
+    ) {
+      String[] row1 = csvReader.readNext();
+      String[] row2 = csvReader.readNext();
+
+      Assert.assertArrayEquals("Failed to match 1st row!",new String[]{"value1", "c", "10", "10.1"}, row1);
+
+      Assert.assertArrayEquals("Failed to match 2nd row!",new String[]{"value2", "c2", "102", "true"}, row2);
+    }
+  }
+
+  @Test
+  public void testParseCustomSeparator() throws Exception {
+
+    String csv = "value1#c#10#10.1\n" +
+      "value2#c2#102#true";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVReader csvReader = new CSVReader(sr,'#','"','\\');
+    ) {
+      String[] row1 = csvReader.readNext();
+      String[] row2 = csvReader.readNext();
+
+      Assert.assertArrayEquals("Failed to match 1st row!",new String[]{"value1", "c", "10", "10.1"}, row1);
+
+      Assert.assertArrayEquals("Failed to match 2nd row!",new String[]{"value2", "c2", "102", "true"}, row2);
+    }
+  }
+
+
+  @Test
+  public void testParseCustomSeparatorAndQuote() throws Exception {
+
+    String csv = "\"valu#e1\"#c#10#10.1\n" +
+      "value2#c2#102#true";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVReader csvReader = new CSVReader(sr,'#','"','\\');
+    ) {
+      String[] row1 = csvReader.readNext();
+      String[] row2 = csvReader.readNext();
+
+      Assert.assertArrayEquals("Failed to match 1st row!",new String[]{"valu#e1", "c", "10", "10.1"}, row1);
+
+      Assert.assertArrayEquals("Failed to match 2nd row!",new String[]{"value2", "c2", "102", "true"}, row2);
+    }
+  }
+
+  @Test
+  public void testParseCustomSeparatorAndCustomQuote() throws Exception {
+
+    String csv = "\'valu#e1\'#c#10#10.1\n" +
+      "value2#c2#102#true";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVReader csvReader = new CSVReader(sr,'#','\'','\\');
+    ) {
+      String[] row1 = csvReader.readNext();
+      String[] row2 = csvReader.readNext();
+      String[] row3 = csvReader.readNext();
+
+      Assert.assertArrayEquals("Failed to match 1st row!",new String[]{"valu#e1", "c", "10", "10.1"}, row1);
+
+      Assert.assertArrayEquals("Failed to match 2nd row!",new String[]{"value2", "c2", "102", "true"}, row2);
+
+      Assert.assertArrayEquals("should match Null", null, row3);
+    }
+  }
+
+  @Test
+  public void testWriter() throws Exception {
+
+    String csv = "\'valu#e1\'#c#10#10.1\n" +
+      "value2#c2#102#true";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVReader csvReader = new CSVReader(sr,'#','\'','\\');
+      StringWriter sw = new StringWriter();
+      CSVWriter csvWriter = new CSVWriter(sw);
+    ) {
+      String[] row1 = csvReader.readNext();
+      csvWriter.writeNext(row1);
+      String[] row2 = csvReader.readNext();
+      csvWriter.writeNext(row2);
+
+      Assert.assertEquals("CSVWriter failed.","\"valu#e1\",\"c\",\"10\",\"10.1\"\n" +
+        "\"value2\",\"c2\",\"102\",\"true\"\n", sw.getBuffer().toString());
+    }
+  }
+
+  @Test
+  public void testWriterCustomSeparator() throws Exception {
+
+    String csv = "\'valu#e1\'#c#10#10.1\n" +
+      "value2#c2#102#true";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVReader csvReader = new CSVReader(sr,'#','\'','\\');
+      StringWriter sw = new StringWriter();
+      CSVWriter csvWriter = new CSVWriter(sw,'$');
+    ) {
+      String[] row1 = csvReader.readNext();
+      csvWriter.writeNext(row1);
+      String[] row2 = csvReader.readNext();
+      csvWriter.writeNext(row2);
+
+      Assert.assertEquals("CSVWriter failed.","\"valu#e1\"$\"c\"$\"10\"$\"10.1\"\n" +
+        "\"value2\"$\"c2\"$\"102\"$\"true\"\n", sw.getBuffer().toString());
+    }
+  }
+
+  @Test
+  public void testWriterCustomSeparatorAndEnline() throws Exception {
+
+    String csv = "value1,c,10,10.1\n" +
+      "value2,c2,102,true";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVReader csvReader = new CSVReader(sr,',','\'','\\');
+      StringWriter sw = new StringWriter();
+      CSVWriter csvWriter = new CSVWriter(sw,'\002',',',"\003");
+    ) {
+      String[] row1 = csvReader.readNext();
+      csvWriter.writeNext(row1,false);
+      String[] row2 = csvReader.readNext();
+      csvWriter.writeNext(row2,false);
+
+      Assert.assertEquals("CSVWriter failed.","value1\002c\00210\00210.1\003" +
+        "value2\002c2\002102\002true\003", sw.getBuffer().toString());
+    }
+  }
+
+  @Test
+  public void testWriterQuote() throws Exception {
+
+    String csv = "val#ue1,c,10,10.1\n" +
+      "'val,ue2',c2,102,true\n" +
+      "val\002ue3,c\0033,103,false";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVReader csvReader = new CSVReader(sr,',','\'','\\');
+      StringWriter sw = new StringWriter();
+      CSVWriter csvWriter = new CSVWriter(sw,'\002','\'',"\003");
+    ) {
+      String[] row1 = csvReader.readNext();
+      csvWriter.writeNext(row1,false);
+      String[] row2 = csvReader.readNext();
+      csvWriter.writeNext(row2,false);
+      String[] row3 = csvReader.readNext();
+      csvWriter.writeNext(row3,false);
+
+      Assert.assertEquals("CSVWriter failed.","val#ue1\u0002c\u000210\u000210.1\u0003" +
+        "val,ue2\u0002c2\u0002102\u0002true\u0003" +
+        "'val\u0002ue3'\u0002c\u00033\u0002103\u0002false\u0003", sw.getBuffer().toString());
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/ParseUtilsTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/ParseUtilsTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/ParseUtilsTest.java
new file mode 100644
index 0000000..12a7037
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/ParseUtilsTest.java
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.hive20.resources.upload;
+
+import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseUtils;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class ParseUtilsTest {
+  @Test
+  public void testDateFormats() {
+    Assert.assertTrue(ParseUtils.isDate("1970-01-01"));
+    Assert.assertTrue(ParseUtils.isDate("1970-01-01 "));
+    Assert.assertTrue(ParseUtils.isDate("0001-1-3"));
+    Assert.assertTrue(ParseUtils.isDate("1996-1-03"));
+    Assert.assertTrue(ParseUtils.isDate("1996-01-3"));
+    Assert.assertTrue(ParseUtils.isDate("1996-10-3"));
+    Assert.assertFalse(ParseUtils.isDate("1970-01-01 01:01:01"));
+    Assert.assertFalse(ParseUtils.isDate("1970-01-01 23:59:59.999999"));
+    Assert.assertFalse(ParseUtils.isDate("1970/01/01"));
+    Assert.assertFalse(ParseUtils.isDate("01-01-1970"));
+    Assert.assertFalse(ParseUtils.isDate("1970-13-01"));
+    Assert.assertFalse(ParseUtils.isDate("1970-01-32"));
+    Assert.assertFalse(ParseUtils.isDate("01/01/1970"));
+    Assert.assertFalse(ParseUtils.isDate("001-1-3"));
+  }
+
+  @Test
+  public void testTimestampFormats() {
+    Assert.assertFalse(ParseUtils.isTimeStamp("1999-11-30"));
+    Assert.assertFalse(ParseUtils.isTimeStamp("1999-12-31 23:59"));
+    Assert.assertTrue(ParseUtils.isTimeStamp("1999-12-31 23:59:59"));
+    Assert.assertTrue(ParseUtils.isTimeStamp("1999-12-31 23:59:59.100"));
+    Assert.assertTrue(ParseUtils.isTimeStamp("1999-12-31 23:59:59.999999"));
+    Assert.assertTrue(ParseUtils.isTimeStamp("1999-12-31 23:59:59.99999999"));
+    Assert.assertTrue(ParseUtils.isTimeStamp("1999-12-31 23:59:59.999999999"));
+    Assert.assertTrue(ParseUtils.isTimeStamp("1999-10-31 23:59:59.999999999"));
+    Assert.assertFalse(ParseUtils.isTimeStamp("1999-12-31 23:59:59.9999999999"));
+    Assert.assertFalse(ParseUtils.isTimeStamp("1999/12/31 23:59:59.9999999999"));
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/QueryGeneratorTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/QueryGeneratorTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/QueryGeneratorTest.java
new file mode 100644
index 0000000..1a0d34a
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/QueryGeneratorTest.java
@@ -0,0 +1,108 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.upload;
+
+import org.apache.ambari.view.hive.resources.uploads.query.RowFormat;
+import org.apache.ambari.view.hive20.client.ColumnDescription;
+import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive20.resources.uploads.HiveFileType;
+import org.apache.ambari.view.hive20.resources.uploads.query.DeleteQueryInput;
+import org.apache.ambari.view.hive20.resources.uploads.query.InsertFromQueryInput;
+import org.apache.ambari.view.hive20.resources.uploads.query.QueryGenerator;
+import org.apache.ambari.view.hive20.resources.uploads.query.TableInfo;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class QueryGeneratorTest {
+  @Test
+  public void testCreateTextFile() {
+
+    List<ColumnDescriptionImpl> cdl = new ArrayList<>(4);
+    cdl.add(new ColumnDescriptionImpl("col1", ColumnDescription.DataTypes.CHAR.toString(), 0, 10));
+    cdl.add(new ColumnDescriptionImpl("col2", ColumnDescription.DataTypes.STRING.toString(), 1));
+    cdl.add(new ColumnDescriptionImpl("col3", ColumnDescription.DataTypes.DECIMAL.toString(), 2, 10, 5));
+    cdl.add(new ColumnDescriptionImpl("col4", ColumnDescription.DataTypes.VARCHAR.toString(), 3, 40));
+    cdl.add(new ColumnDescriptionImpl("col5", ColumnDescription.DataTypes.INT.toString(), 4));
+
+    TableInfo ti = new TableInfo("databaseName", "tableName", cdl, HiveFileType.TEXTFILE, new RowFormat(',', '\\'));
+
+    QueryGenerator qg = new QueryGenerator();
+    Assert.assertEquals("Create query for text file not correct ","CREATE TABLE tableName (col1 CHAR(10), col2 STRING," +
+      " col3 DECIMAL(10,5), col4 VARCHAR(40), col5 INT) ROW FORMAT DELIMITED FIELDS TERMINATED BY ','" +
+      " ESCAPED BY '\\\\' STORED AS TEXTFILE;",qg.generateCreateQuery(ti));
+  }
+
+  @Test
+  public void testCreateORC() {
+
+    List<ColumnDescriptionImpl> cdl = new ArrayList<>(4);
+    cdl.add(new ColumnDescriptionImpl("col1", ColumnDescription.DataTypes.CHAR.toString(), 0, 10));
+    cdl.add(new ColumnDescriptionImpl("col2", ColumnDescription.DataTypes.STRING.toString(), 1));
+    cdl.add(new ColumnDescriptionImpl("col3", ColumnDescription.DataTypes.DECIMAL.toString(), 2, 10, 5));
+    cdl.add(new ColumnDescriptionImpl("col4", ColumnDescription.DataTypes.VARCHAR.toString(), 3, 40));
+    cdl.add(new ColumnDescriptionImpl("col5", ColumnDescription.DataTypes.INT.toString(), 4));
+
+    TableInfo ti = new TableInfo("databaseName", "tableName", cdl, HiveFileType.ORC, new RowFormat(',', '\\'));
+
+    QueryGenerator qg = new QueryGenerator();
+    Assert.assertEquals("Create query for ORC file not correct ","CREATE TABLE tableName (col1 CHAR(10), col2 STRING, col3 DECIMAL(10,5), col4 VARCHAR(40), col5 INT) STORED AS ORC;",qg.generateCreateQuery(ti));
+  }
+
+  @Test
+  public void testInsertWithoutUnhexFromQuery() {
+    List<ColumnDescriptionImpl> cdl = new ArrayList<>(4);
+    cdl.add(new ColumnDescriptionImpl("col1", ColumnDescription.DataTypes.CHAR.toString(), 0, 10));
+    cdl.add(new ColumnDescriptionImpl("col2", ColumnDescription.DataTypes.STRING.toString(), 1));
+    cdl.add(new ColumnDescriptionImpl("col3", ColumnDescription.DataTypes.DECIMAL.toString(), 2, 10, 5));
+    cdl.add(new ColumnDescriptionImpl("col4", ColumnDescription.DataTypes.VARCHAR.toString(), 3, 40));
+    cdl.add(new ColumnDescriptionImpl("col5", ColumnDescription.DataTypes.INT.toString(), 4));
+
+    InsertFromQueryInput ifqi = new InsertFromQueryInput("fromDB","fromTable","toDB","toTable", cdl, Boolean.FALSE);
+
+    QueryGenerator qg = new QueryGenerator();
+    Assert.assertEquals("insert from one table to another not correct ","INSERT INTO TABLE toDB.toTable SELECT col1, col2, col3, col4, col5 FROM fromDB.fromTable;",qg.generateInsertFromQuery(ifqi));
+  }
+
+  @Test
+  public void testInsertWithUnhexFromQuery() {
+    List<ColumnDescriptionImpl> cdl = new ArrayList<>(4);
+    cdl.add(new ColumnDescriptionImpl("col1", ColumnDescription.DataTypes.CHAR.toString(), 0, 10));
+    cdl.add(new ColumnDescriptionImpl("col2", ColumnDescription.DataTypes.STRING.toString(), 1));
+    cdl.add(new ColumnDescriptionImpl("col3", ColumnDescription.DataTypes.DECIMAL.toString(), 2, 10, 5));
+    cdl.add(new ColumnDescriptionImpl("col4", ColumnDescription.DataTypes.VARCHAR.toString(), 3, 40));
+    cdl.add(new ColumnDescriptionImpl("col5", ColumnDescription.DataTypes.INT.toString(), 4));
+
+    InsertFromQueryInput ifqi = new InsertFromQueryInput("fromDB","fromTable","toDB","toTable", cdl, Boolean.TRUE);
+
+    QueryGenerator qg = new QueryGenerator();
+    Assert.assertEquals("insert from one table to another not correct ","INSERT INTO TABLE toDB.toTable SELECT UNHEX(col1), UNHEX(col2), col3, UNHEX(col4), col5 FROM fromDB.fromTable;",qg.generateInsertFromQuery(ifqi));
+  }
+
+  @Test
+  public void testDropTableQuery() {
+
+    DeleteQueryInput deleteQueryInput = new DeleteQueryInput("dbName","tableName");
+
+    QueryGenerator qg = new QueryGenerator();
+    Assert.assertEquals("drop table query not correct ","DROP TABLE dbName.tableName;",qg.generateDropTableQuery(deleteQueryInput ));
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/TableDataReaderTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/TableDataReaderTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/TableDataReaderTest.java
new file mode 100644
index 0000000..340a921
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/TableDataReaderTest.java
@@ -0,0 +1,127 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.upload;
+
+import org.apache.ambari.view.hive20.client.ColumnDescription;
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive20.resources.uploads.TableDataReader;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+
+public class TableDataReaderTest {
+
+  private class RowIter implements Iterator<Row> {
+    int numberOfRows;
+    int numberOfCols;
+    int index = 0 ;
+    ArrayList<Row> rows = new ArrayList<Row>();
+    public RowIter(int numberOfRows, int numberOfCols){
+      this.numberOfRows = numberOfRows;
+      this.numberOfCols = numberOfCols;
+      int x = 0 ;
+      for(int i = 0; i < this.numberOfRows; i++ ){
+        Object [] objArray = new Object[10];
+        for(int j = 0; j < this.numberOfCols; j++ ){
+          objArray[j] = x++ + "" ;
+        }
+        Row row = new Row(objArray);
+        rows.add(row);
+      }
+    }
+    @Override
+    public boolean hasNext() {
+      return index < numberOfRows;
+    }
+
+    @Override
+    public Row next() {
+      return rows.get(index++);
+    }
+
+    @Override
+    public void remove() {
+      throw new RuntimeException("Operation not supported.");
+    }
+
+    @Override
+    public String toString() {
+      return "RowIter{" +
+              "index=" + index +
+              ", rows=" + rows +
+              '}';
+    }
+  }
+
+  @Test
+  public void testCSVReader() throws IOException {
+    RowIter rowIter = new RowIter(10,10);
+    List<ColumnDescriptionImpl> colDescs = new LinkedList<>();
+    for(int i = 0 ; i < 10 ; i++ ) {
+      ColumnDescriptionImpl cd = new ColumnDescriptionImpl("col" + (i+1) , ColumnDescription.DataTypes.STRING.toString(), i);
+      colDescs.add(cd);
+    }
+
+    TableDataReader tableDataReader = new TableDataReader(rowIter, colDescs, false);
+
+    char del = TableDataReader.CSV_DELIMITER;
+    char[] first10 = {'0', del, '1', del, '2', del, '3', del, '4', del};
+    char [] buf = new char[10];
+    tableDataReader.read(buf,0,10);
+
+    Assert.assertArrayEquals(first10,buf);
+
+    char[] next11 = {'5', del, '6', del, '7', del, '8', del, '9', '\n', '1'}; //"5,6,7,8,9\n1".toCharArray();
+    char [] buf1 = new char[11];
+    tableDataReader.read(buf1,0,11);
+
+    Assert.assertArrayEquals(next11,buf1);
+
+    // read it fully
+    while( tableDataReader.read(buf,0,10) != -1 );
+
+    char [] last10 = {'9', '7', del, '9', '8', del, '9', '9', '\n', del}; //"97,98,99\n,".toCharArray(); // the trailing delimiter is left over from the previous read.
+
+    Assert.assertArrayEquals(last10,buf);
+  }
+
+  @Test
+  public void testEmptyCSVReader() throws IOException {
+    RowIter rowIter = new RowIter(0,0);
+
+    TableDataReader tableDataReader = new TableDataReader(rowIter, null, false);
+
+    char[] first10 = new char [10];
+    char [] buf = new char[10];
+    for( int i = 0 ; i < 10 ; i++ ){
+      first10[i] = '\0';
+      buf[i] = '\0';
+    }
+
+    tableDataReader.read(buf,0,10);
+
+    Assert.assertArrayEquals(first10,buf);
+  }
+}
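
TableDataReader (exercised by the test above) presents the row iterator as an ordinary java.io.Reader, so a consumer can drain the whole table with a plain read loop instead of reasoning about row boundaries. A minimal sketch of such a consumer, assuming only the read(char[], int, int) behaviour shown in the test (the helper class and buffer size below are illustrative, not part of the patch):

    import java.io.IOException;

    import org.apache.ambari.view.hive20.resources.uploads.TableDataReader;

    // Hypothetical helper for illustration only; not part of this patch.
    class TableDataReaderUsageSketch {
      String drainToCsv(TableDataReader tableDataReader) throws IOException {
        StringBuilder csv = new StringBuilder();
        char[] buffer = new char[1024];
        int read;
        // read(...) returns -1 once the underlying row iterator is exhausted.
        while ((read = tableDataReader.read(buffer, 0, buffer.length)) != -1) {
          csv.append(buffer, 0, read); // append only the characters actually filled
        }
        return csv.toString();
      }
    }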

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/XMLParserTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/XMLParserTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/XMLParserTest.java
new file mode 100644
index 0000000..bf79a4a
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/XMLParserTest.java
@@ -0,0 +1,135 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.upload;
+
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.xml.XMLParser;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.Iterator;
+
+public class XMLParserTest {
+
+  @Test(expected = IOException.class)
+  public void testEmptyStream() throws Exception {
+    String xml = "";
+
+    try(
+      StringReader sr = new StringReader(xml);
+      XMLParser jp = new XMLParser(sr, null);
+      ) {
+        // construction of the XMLParser is expected to throw an IOException.
+    }
+  }
+
+  @Test
+  public void testEmptyRow() throws Exception {
+    String xml = "<table><row></row></table>";
+    try(
+      StringReader sr = new StringReader(xml);
+      XMLParser jp = new XMLParser(sr, null);
+      ) {
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator should not be Empty", true, iterator.hasNext());
+      Assert.assertArrayEquals("Row should be empty",new Object[]{},iterator.next().getRow());
+    }
+  }
+
+
+  @Test
+  public void testEmptyTable() throws Exception {
+    String xml = "<table></table>";
+
+    try(
+      StringReader sr = new StringReader(xml);
+      XMLParser jp = new XMLParser(sr, null);
+      ) {
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator should be empty!", false, iterator.hasNext());
+    }
+  }
+
+  @Test
+  public void testParse1Row() throws Exception {
+
+    String xml =
+    "<table>"
+    + "<row>"
+    + "<col name=\"key1\">value1</col>"
+    + "<col name=\"key2\">c</col>"
+    + "<col name=\"key3\">10</col>"
+    + "<col name=\"key4\">10.1</col>"
+    + "</row>"
+    + "</table>"  ;
+
+    try(
+      StringReader sr = new StringReader(xml);
+      XMLParser jp = new XMLParser(sr, null)
+    ) {
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator should not be empty!", true, iterator.hasNext());
+      Row row = iterator.next();
+      Row expected = new Row(new Object[]{"value1", "c", "10", "10.1"});
+      Assert.assertEquals("Row not equal!", expected, row);
+
+      Assert.assertEquals("Should report no more rows!", false, iterator.hasNext());
+    }
+  }
+
+  @Test
+  public void testParseMultipleRow() throws Exception {
+    String xml =
+    "<table>"
+    + "<row>"
+    + "<col name=\"key1\">value1</col>"
+    + "<col name=\"key2\">c</col>"
+    + "<col name=\"key3\">10</col>"
+    + "<col name=\"key4\">10.1</col>"
+    + "</row>"
+    + "<row>"
+    + "<col name=\"key1\">value2</col>"
+    + "<col name=\"key2\">c2</col>"
+    + "<col name=\"key3\">102</col>"
+    + "<col name=\"key4\">true</col>"
+    + "</row>"
+    + "</table>"  ;
+
+    try(
+      StringReader sr = new StringReader(xml);
+      XMLParser jp = new XMLParser(sr, null)
+    ) {
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", new Row(new Object[]{"value1", "c", "10", "10.1"}), iterator.next());
+
+      Assert.assertEquals("Failed to detect 2nd row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 2nd row!", new Row(new Object[]{"value2", "c2", "102", Boolean.TRUE.toString()}), iterator.next());
+
+      Assert.assertEquals("Failed to detect end of rows!", false, iterator.hasNext());
+      Assert.assertEquals("Failed to detect end of rows 2nd time!", false, iterator.hasNext());
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/rest/postman/hive2.postman_collection.json
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/rest/postman/hive2.postman_collection.json b/contrib/views/hive20/src/test/rest/postman/hive2.postman_collection.json
new file mode 100644
index 0000000..c7ce0c9
--- /dev/null
+++ b/contrib/views/hive20/src/test/rest/postman/hive2.postman_collection.json
@@ -0,0 +1,107 @@
+{
+	"variables": [],
+	"info": {
+		"name": "hive2",
+		"_postman_id": "d3d966bf-9112-9017-908b-7bc820a5a962",
+		"description": "",
+		"schema": "https://schema.getpostman.com/json/collection/v2.0.0/collection.json"
+	},
+	"item": [
+		{
+			"name": "fetch  table",
+			"request": {
+				"auth": {
+					"type": "basic",
+					"basic": {
+						"username": "admin",
+						"password": "admin",
+						"saveHelperData": true,
+						"showPassword": false
+					}
+				},
+				"url": "http://c6401.ambari.apache.org:8080/api/v1/views/HIVE/versions/1.5.0/instances/AUTO_HIVE_INSTANCE/resources/ddl/databases/d1/tables/t2/info?_=1481634018195",
+				"method": "GET",
+				"header": [
+					{
+						"key": "X-Requested-By",
+						"value": "ambari",
+						"description": ""
+					},
+					{
+						"key": "Authorization",
+						"value": "Basic YWRtaW46YWRtaW4=",
+						"description": ""
+					}
+				],
+				"body": {},
+				"description": "fetch d1.t2 table"
+			},
+			"response": []
+		},
+		{
+			"name": "create new table",
+			"request": {
+				"auth": {
+					"type": "basic",
+					"basic": {
+						"username": "admin",
+						"password": "admin",
+						"saveHelperData": true,
+						"showPassword": false
+					}
+				},
+				"url": "http://c6401.ambari.apache.org:8080/api/v1/views/HIVE/versions/1.5.0/instances/AUTO_HIVE_INSTANCE/resources/ddl/databases/d1/tables",
+				"method": "POST",
+				"header": [
+					{
+						"key": "X-Requested-By",
+						"value": "ambari",
+						"description": ""
+					},
+					{
+						"key": "Authorization",
+						"value": "Basic YWRtaW46YWRtaW4=",
+						"description": ""
+					}
+				],
+				"body": {
+					"mode": "raw",
+					"raw": "{\n    \"database\": \"d1\",\n    \"table\": \"t7\",\n    \"columns\": [\n      {\n        \"name\": \"col_name1\",\n        \"type\": \"string\",\n        \"comment\": \"col_name1 comment\"\n      },\n      {\n        \"name\": \"col_name2\",\n        \"type\": \"decimal(10,2)\",\n        \"comment\": \"col_name2 comment\"\n      }\n    ],\n    \"ddl\": \"CREATE TABLE `t2`(\\n  `col_name1` string COMMENT 'col_name1 comment', \\n  `col_name2` decimal(10,2) COMMENT 'col_name2 comment')\\nCOMMENT 'table t1 comment'\\nPARTITIONED BY ( \\n  `col_name3` string COMMENT 'col_name3 comment', \\n  `col_name4` char(1) COMMENT 'col_name4 comment')\\nCLUSTERED BY ( \\n  col_name1, \\n  col_name2) \\nSORTED BY ( \\n  col_name1 ASC, \\n  col_name2 DESC) \\nINTO 5 BUCKETS\\nROW FORMAT DELIMITED \\n  FIELDS TERMINATED BY ',' \\nWITH SERDEPROPERTIES ( \\n  'escape.delim'='\\\\\\\\') \\nSTORED AS INPUTFORMAT \\n  'org.apache.hadoop.mapred.SequenceFileInputFormat' \\nOUTPUTFORMAT \\n  'or
 g.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat'\\nLOCATION\\n  'hdfs://c6401.ambari.apache.org:8020/user/hive/tables/d1/t1'\\nTBLPROPERTIES (\\n  'NO_AUTO_COMPACTION'='true', \\n  'immutable'='false', \\n  'orc.compress'='SNAPPY', \\n  'transient_lastDdlTime'='1481520077')\\n\",\n    \"partitionInfo\": {\n      \"columns\": [\n        {\n          \"name\": \"col_name4\",\n          \"type\": \"char(1)\",\n          \"comment\": \"col_name4 comment\"\n        },\n        {\n          \"name\": \"col_name3\",\n          \"type\": \"string\",\n          \"comment\": \"col_name3 comment\"\n        }\n      ]\n    },\n    \"detailedInfo\": {\n      \"dbName\": \"d1\",\n      \"owner\": \"admin\",\n      \"createTime\": \"Mon Dec 12 05:21:17 UTC 2016\",\n      \"lastAccessTime\": \"UNKNOWN\",\n      \"retention\": \"0\",\n      \"tableType\": \"MANAGED_TABLE\",\n      \"location\": \"hdfs://c6401.ambari.apache.org:8020/user/hive/tables/d1/t1\",\n      \"parameters\": {\n        
 \"immutable\": \"false\",\n        \"orc.compress\": \"SNAPPY\",\n        \"transient_lastDdlTime\": \"1481520077\",\n        \"NO_AUTO_COMPACTION\": \"true\",\n        \"comment\": \"table t1 comment\",\n        \"SORTBUCKETCOLSPREFIX\": \"TRUE\"\n      }\n    },\n    \"storageInfo\": {\n      \"serdeLibrary\": \"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe\",\n      \"inputFormat\": \"org.apache.hadoop.mapred.SequenceFileInputFormat\",\n      \"outputFormat\": \"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat\",\n      \"compressed\": \"No\",\n      \"numBuckets\": \"5\",\n      \"bucketCols\": [\n        \"col_name1\",\n        \" col_name2\"\n      ],\n      \"sortCols\": [\n        {\n          \"columnName\": \"col_name1\",\n          \"order\": \"ASC\"\n        },\n        {\n          \"columnName\": \"col_name2\",\n          \"order\": \"DESC\"\n        }\n      ],\n      \"parameters\": {\n        \"escape.delim\": \"\\\\\\\\\",\n        \"field.delim\": \"
 ,\",\n        \"serialization.format\": \",\"\n      }\n    }\n  }"
+				},
+				"description": "create new table"
+			},
+			"response": []
+		},
+		{
+			"name": "delete  table",
+			"request": {
+				"auth": {
+					"type": "basic",
+					"basic": {
+						"username": "admin",
+						"password": "admin",
+						"saveHelperData": true,
+						"showPassword": false
+					}
+				},
+				"url": "http://c6401.ambari.apache.org:8080/api/v1/views/HIVE/versions/1.5.0/instances/AUTO_HIVE_INSTANCE/resources/ddl/databases/d1/tables/t9",
+				"method": "DELETE",
+				"header": [
+					{
+						"key": "X-Requested-By",
+						"value": "ambari",
+						"description": ""
+					},
+					{
+						"key": "Authorization",
+						"value": "Basic YWRtaW46YWRtaW4=",
+						"description": ""
+					}
+				],
+				"body": {},
+				"description": "delete d1.t9 table"
+			},
+			"response": []
+		}
+	]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/pom.xml b/contrib/views/pom.xml
index b6d4176..7f02a43 100644
--- a/contrib/views/pom.xml
+++ b/contrib/views/pom.xml
@@ -48,6 +48,7 @@
     <module>storm</module>
     <module>hueambarimigration</module>
     <module>hive-next</module>
+    <module>hive20</module>
     <module>wfmanager</module>
     <!--ambari-views-package should be last in the module list for it to function properly-->
     <module>ambari-views-package</module>

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index d6418ff..dcdbab3 100644
--- a/pom.xml
+++ b/pom.xml
@@ -320,6 +320,8 @@
             <exclude>contrib/views/commons/src/main/resources/ui/*/tests/**/vendor/**</exclude>
             <exclude>contrib/views/hive-next/src/main/resources/ui/hive-web/vendor/codemirror/**</exclude>
             <exclude>contrib/views/hive-next/src/main/resources/ui/hive-web/.bowerrc</exclude>
+            <exclude>contrib/views/hive20/src/main/resources/ui/vendor/codemirror/**</exclude>
+            <exclude>contrib/views/hive20/src/main/resources/ui/.bowerrc</exclude>
             <exclude>contrib/views/files/src/main/resources/ui/.bowerrc</exclude>
             <exclude>contrib/views/files/src/main/resources/ui/bower_components/**</exclude>
             <exclude>contrib/views/files/src/main/resources/ui/node/**</exclude>


[12/20] ambari git commit: AMBARI-19321 : Hive View 2.0 - Minimal view for Hive which includes new UI changes. Also made changes in poms as required (nitirajrathore)

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/ATSRequestsDelegateImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/ATSRequestsDelegateImpl.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/ATSRequestsDelegateImpl.java
new file mode 100644
index 0000000..a2c9396
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/ATSRequestsDelegateImpl.java
@@ -0,0 +1,175 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs.atsJobs;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.commons.io.IOUtils;
+import org.json.simple.JSONObject;
+import org.json.simple.JSONValue;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.HashMap;
+
+public class ATSRequestsDelegateImpl implements ATSRequestsDelegate {
+  protected final static Logger LOG =
+    LoggerFactory.getLogger(ATSRequestsDelegateImpl.class);
+  public static final String EMPTY_ENTITIES_JSON = "{ \"entities\" : [  ] }";
+
+  private ViewContext context;
+  private String atsUrl;
+
+  public ATSRequestsDelegateImpl(ViewContext context, String atsUrl) {
+    this.context = context;
+    this.atsUrl = addProtocolIfMissing(atsUrl);
+  }
+
+  private String addProtocolIfMissing(String atsUrl) {
+    if (!atsUrl.matches("^[^:]+://.*$"))
+      atsUrl = "http://" + atsUrl;
+    return atsUrl;
+  }
+
+  @Override
+  public String hiveQueryIdDirectUrl(String entity) {
+    return atsUrl + "/ws/v1/timeline/HIVE_QUERY_ID/" + entity;
+  }
+
+  @Override
+  public String hiveQueryIdOperationIdUrl(String operationId) {
+    // ATS parses an operationId that starts with a digit as an integer and then returns no response.
+    // Quoting the value prevents this.
+    return atsUrl + "/ws/v1/timeline/HIVE_QUERY_ID?primaryFilter=operationid:%22" + operationId + "%22";
+  }
+
+  @Override
+  public String tezDagDirectUrl(String entity) {
+    return atsUrl + "/ws/v1/timeline/TEZ_DAG_ID/" + entity;
+  }
+
+  @Override
+  public String tezDagNameUrl(String name) {
+    return atsUrl + "/ws/v1/timeline/TEZ_DAG_ID?primaryFilter=dagName:" + name;
+  }
+
+  @Override
+  public String tezVerticesListForDAGUrl(String dagId) {
+    return atsUrl + "/ws/v1/timeline/TEZ_VERTEX_ID?primaryFilter=TEZ_DAG_ID:" + dagId;
+  }
+
+  @Override
+  public JSONObject hiveQueryIdsForUser(String username) {
+    String hiveQueriesListUrl = atsUrl + "/ws/v1/timeline/HIVE_QUERY_ID?primaryFilter=requestuser:" + username;
+    String response = readFromWithDefault(hiveQueriesListUrl, EMPTY_ENTITIES_JSON);
+    return (JSONObject) JSONValue.parse(response);
+  }
+
+  @Override
+  public JSONObject hiveQueryIdByOperationId(String operationId) {
+    String hiveQueriesListUrl = hiveQueryIdOperationIdUrl(operationId);
+    String response = readFromWithDefault(hiveQueriesListUrl, EMPTY_ENTITIES_JSON);
+    return (JSONObject) JSONValue.parse(response);
+  }
+
+  @Override
+  public JSONObject tezDagByName(String name) {
+    String tezDagUrl = tezDagNameUrl(name);
+    String response = readFromWithDefault(tezDagUrl, EMPTY_ENTITIES_JSON);
+    return (JSONObject) JSONValue.parse(response);
+  }
+
+  @Override
+  public JSONObject tezDagByEntity(String entity) {
+    String tezDagEntityUrl = tezDagEntityUrl(entity);
+    String response = readFromWithDefault(tezDagEntityUrl, EMPTY_ENTITIES_JSON);
+    return (JSONObject) JSONValue.parse(response);
+  }
+
+  /**
+   * Fetches the HIVE_QUERY_ID entities from ATS for the given user within the given time window.
+   * @param username username for which to fetch hive query IDs
+   * @param startTime window start in milliseconds, inclusive
+   * @param endTime window end in milliseconds, exclusive
+   * @return JSON response containing the matching entities
+   */
+  @Override
+  public JSONObject hiveQueryIdsForUserByTime(String username, long startTime, long endTime) {
+    StringBuilder url = new StringBuilder();
+    url.append(atsUrl).append("/ws/v1/timeline/HIVE_QUERY_ID?")
+      .append("windowStart=").append(startTime)
+      .append("&windowEnd=").append(endTime)
+      .append("&primaryFilter=requestuser:").append(username);
+    String hiveQueriesListUrl = url.toString();
+
+    String response = readFromWithDefault(hiveQueriesListUrl, EMPTY_ENTITIES_JSON);
+    return (JSONObject) JSONValue.parse(response);
+  }
+
+  @Override
+  public JSONObject hiveQueryEntityByEntityId(String hiveEntityId) {
+    StringBuilder url = new StringBuilder();
+    url.append(atsUrl).append("/ws/v1/timeline/HIVE_QUERY_ID/").append(hiveEntityId);
+    String hiveQueriesListUrl = url.toString();
+    String response = readFromWithDefault(hiveQueriesListUrl, EMPTY_ENTITIES_JSON);
+    return (JSONObject) JSONValue.parse(response);
+  }
+
+  private String tezDagEntityUrl(String entity) {
+    return atsUrl + "/ws/v1/timeline/TEZ_DAG_ID?primaryFilter=callerId:" + entity;
+  }
+
+  public boolean checkATSStatus() throws IOException {
+    String url = atsUrl + "/ws/v1/timeline/";
+    InputStream responseInputStream = context.getURLStreamProvider().readAsCurrent(url, "GET",
+            (String)null, new HashMap<String, String>());
+    IOUtils.toString(responseInputStream);
+    return true;
+  }
+
+  @Override
+  public JSONObject tezVerticesListForDAG(String dagId) {
+    String response = readFromWithDefault(tezVerticesListForDAGUrl(dagId), EMPTY_ENTITIES_JSON);
+    return (JSONObject) JSONValue.parse(response);
+  }
+
+
+  protected String readFromWithDefault(String atsUrl, String defaultResponse) {
+    String response;
+    try {
+      InputStream responseInputStream = context.getURLStreamProvider().readAsCurrent(atsUrl, "GET",
+          (String)null, new HashMap<String, String>());
+      response = IOUtils.toString(responseInputStream);
+    } catch (IOException e) {
+      LOG.error("Error while reading from ATS", e);
+      response = defaultResponse;
+    }
+    return response;
+  }
+
+  public String getAtsUrl() {
+    return atsUrl;
+  }
+
+  public void setAtsUrl(String atsUrl) {
+    this.atsUrl = atsUrl;
+  }
+}
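
The delegate above only assembles ATS timeline URLs and hands back parsed JSON, so callers supply the ViewContext and the ATS address themselves. A minimal usage sketch, where the context variable, helper class, and ATS host/port are placeholders rather than anything defined by this patch:

    import org.apache.ambari.view.ViewContext;
    import org.apache.ambari.view.hive20.resources.jobs.atsJobs.ATSRequestsDelegateImpl;
    import org.json.simple.JSONObject;

    // Hypothetical helper for illustration only; not part of this patch.
    class AtsUsageSketch {
      JSONObject recentQueriesFor(ViewContext context, String username) {
        // "c6401.ambari.apache.org:8188" is a placeholder ATS address; the missing
        // protocol is added by addProtocolIfMissing() in the constructor.
        ATSRequestsDelegateImpl ats = new ATSRequestsDelegateImpl(context, "c6401.ambari.apache.org:8188");
        long end = System.currentTimeMillis();
        long start = end - 60L * 60L * 1000L; // one-hour window, in milliseconds
        return ats.hiveQueryIdsForUserByTime(username, start, end);
      }
    }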

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/HiveQueryId.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/HiveQueryId.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/HiveQueryId.java
new file mode 100644
index 0000000..cb261d1
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/HiveQueryId.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs.atsJobs;
+
+import org.json.simple.JSONObject;
+
+import java.util.List;
+
+public class HiveQueryId {
+  public static long ATS_15_RESPONSE_VERSION = 2; // version returned from ATS 1.5 release
+
+  public String url;
+
+  public String entity;
+  public String query;
+
+  public List<String> dagNames;
+
+  public List<JSONObject> stages;
+
+  public long starttime;
+  public long duration;
+  public String operationId;
+  public String user;
+  public long version;
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/IATSParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/IATSParser.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/IATSParser.java
new file mode 100644
index 0000000..e925bb0
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/IATSParser.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs.atsJobs;
+
+import java.util.List;
+
+public interface IATSParser {
+  List<HiveQueryId> getHiveQueryIdsForUser(String username);
+
+  List<TezVertexId> getVerticesForDAGId(String dagId);
+
+  HiveQueryId getHiveQueryIdByOperationId(String guidString);
+
+  TezDagId getTezDAGByName(String name);
+
+  TezDagId getTezDAGByEntity(String entity);
+
+  List<HiveQueryId> getHiveQueryIdsForUserByTime(String username, long startTime, long endTime);
+
+  HiveQueryId getHiveQueryIdByHiveEntityId(String hiveEntityId);
+
+  List<HiveQueryId> getHiveQueryIdByEntityList(List<String> hiveEntityIds);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/TezDagId.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/TezDagId.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/TezDagId.java
new file mode 100644
index 0000000..184436c
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/TezDagId.java
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs.atsJobs;
+
+public class TezDagId {
+  public static final String STATUS_UNKNOWN = "UNKNOWN";
+  public String applicationId = "";
+  public String entity = "";
+  public String status = STATUS_UNKNOWN;
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/TezVertexId.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/TezVertexId.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/TezVertexId.java
new file mode 100644
index 0000000..8acd683
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/TezVertexId.java
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs.atsJobs;
+
+public class TezVertexId {
+  public String entity;
+  public String vertexName;
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/rm/RMParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/rm/RMParser.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/rm/RMParser.java
new file mode 100644
index 0000000..ee96e05
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/rm/RMParser.java
@@ -0,0 +1,129 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs.rm;
+
+import org.apache.ambari.view.hive20.resources.jobs.atsJobs.TezVertexId;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Parser of Resource Manager responses
+ */
+public class RMParser {
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(RMParser.class);
+  private RMRequestsDelegate delegate;
+
+  public RMParser(RMRequestsDelegate delegate) {
+    this.delegate = delegate;
+  }
+
+  /**
+   * Progress of DAG
+   * @param appId App Id
+   * @param dagId DAG Id
+   * @return progress of DAG
+   */
+  public Double getDAGProgress(String appId, String dagId) {
+    String dagIdx = parseDagIdIndex(dagId);
+    JSONObject progresses = delegate.dagProgress(appId, dagIdx);
+
+    double dagProgressValue;
+    if (progresses != null) {
+      JSONObject dagProgress = (JSONObject) progresses.get("dagProgress");
+      dagProgressValue = (Double) (dagProgress.get("progress"));
+    } else {
+      LOG.error("Error while retrieving progress of " + appId + ":" + dagId + ". 0 assumed.");
+      dagProgressValue = 0;
+    }
+    return dagProgressValue;
+  }
+
+  /**
+   * Progress of vertices
+   * @param appId App Id
+   * @param dagId DAG Id
+   * @param vertices vertices list
+   * @return progress information for each vertex
+   */
+  public List<VertexProgress> getDAGVerticesProgress(String appId, String dagId, List<TezVertexId> vertices) {
+    String dagIdx = parseDagIdIndex(dagId);
+
+    Map<String, String> vertexIdToEntityMapping = new HashMap<String, String>();
+    StringBuilder builder = new StringBuilder();
+    if (vertices.size() > 0) {
+      for (TezVertexId vertexId : vertices) {
+        String[] parts = vertexId.entity.split("_");
+        String vertexIdx = parts[parts.length - 1];
+        builder.append(vertexIdx).append(",");
+
+        vertexIdToEntityMapping.put(vertexId.entity, vertexId.vertexName);
+      }
+      builder.setLength(builder.length() - 1); // remove last comma
+    }
+
+    String commaSeparatedVertices = builder.toString();
+
+    List<VertexProgress> parsedVertexProgresses = new LinkedList<VertexProgress>();
+    JSONObject vertexProgressesResponse = delegate.verticesProgress(
+        appId, dagIdx, commaSeparatedVertices);
+    if (vertexProgressesResponse == null) {
+      LOG.error("Error while retrieving progress of vertices " +
+          appId + ":" + dagId + ":" + commaSeparatedVertices + ". 0 assumed for all vertices.");
+      for (TezVertexId vertexId : vertices) {
+        VertexProgress vertexProgressInfo = new VertexProgress();
+        vertexProgressInfo.name = vertexId.vertexName;
+        vertexProgressInfo.progress = 0.0;
+        parsedVertexProgresses.add(vertexProgressInfo);
+      }
+      return parsedVertexProgresses;
+    }
+    JSONArray vertexProgresses = (JSONArray) vertexProgressesResponse.get("vertexProgresses");
+
+    for (Object vertex : vertexProgresses) {
+      JSONObject jsonObject = (JSONObject) vertex;
+
+      VertexProgress vertexProgressInfo = new VertexProgress();
+      vertexProgressInfo.id = (String) jsonObject.get("id");
+      vertexProgressInfo.name = vertexIdToEntityMapping.get(vertexProgressInfo.id);
+      vertexProgressInfo.progress = (Double) jsonObject.get("progress");
+
+      parsedVertexProgresses.add(vertexProgressInfo);
+    }
+    return parsedVertexProgresses;
+  }
+
+  public String parseDagIdIndex(String dagId) {
+    String[] dagIdParts = dagId.split("_");
+    return dagIdParts[dagIdParts.length - 1];
+  }
+
+  public static class VertexProgress {
+    public String id;
+    public String name;
+    public Double progress;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/rm/RMParserFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/rm/RMParserFactory.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/rm/RMParserFactory.java
new file mode 100644
index 0000000..64f268a
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/rm/RMParserFactory.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs.rm;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.utils.ambari.AmbariApi;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class RMParserFactory {
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(RMParserFactory.class);
+
+  private final ViewContext context;
+  private final AmbariApi ambariApi;
+
+  public RMParserFactory(ViewContext context) {
+    this.context = context;
+    this.ambariApi = new AmbariApi(context);
+  }
+
+  public RMParser getRMParser() {
+    String rmUrl = getRMUrl();
+
+    RMRequestsDelegate delegate = new RMRequestsDelegateImpl(context, rmUrl);
+    return new RMParser(delegate);
+  }
+
+  public String getRMUrl() {
+    return ambariApi.getServices().getRMUrl();
+  }
+}
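
RMParserFactory resolves the ResourceManager URL through AmbariApi and wires it into an RMParser, which then queries Tez DAG and vertex progress through the RM proxy. A small sketch of how a caller might poll DAG progress; the helper class and the application/DAG ids below are placeholders, not values produced by this patch:

    import org.apache.ambari.view.ViewContext;
    import org.apache.ambari.view.hive20.resources.jobs.rm.RMParser;
    import org.apache.ambari.view.hive20.resources.jobs.rm.RMParserFactory;

    // Hypothetical helper for illustration only; not part of this patch.
    class RmProgressSketch {
      Double dagProgress(ViewContext context) {
        RMParser parser = new RMParserFactory(context).getRMParser();
        // Placeholder ids; real values come from the ATS HIVE_QUERY_ID / TEZ_DAG_ID entities.
        return parser.getDAGProgress("application_1480000000000_0001", "dag_1480000000000_0001_1");
      }
    }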

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/rm/RMRequestsDelegate.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/rm/RMRequestsDelegate.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/rm/RMRequestsDelegate.java
new file mode 100644
index 0000000..e661833
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/rm/RMRequestsDelegate.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs.rm;
+
+import org.json.simple.JSONObject;
+
+public interface RMRequestsDelegate {
+  String dagProgressUrl(String appId, String dagIdx);
+
+  String verticesProgressUrl(String appId, String dagIdx, String vertices);
+
+  JSONObject dagProgress(String appId, String dagIdx);
+
+  JSONObject verticesProgress(String appId, String dagIdx, String commaSeparatedVertices);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/rm/RMRequestsDelegateImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/rm/RMRequestsDelegateImpl.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/rm/RMRequestsDelegateImpl.java
new file mode 100644
index 0000000..ed07b55
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/rm/RMRequestsDelegateImpl.java
@@ -0,0 +1,99 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs.rm;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
+import org.apache.commons.io.IOUtils;
+import org.json.simple.JSONObject;
+import org.json.simple.JSONValue;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.HashMap;
+
+public class RMRequestsDelegateImpl implements RMRequestsDelegate {
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(RMRequestsDelegateImpl.class);
+  public static final String EMPTY_ENTITIES_JSON = "{ \"entities\" : [  ] }";
+
+  private ViewContext context;
+  private String rmUrl;
+
+  public RMRequestsDelegateImpl(ViewContext context, String rmUrl) {
+    this.context = context;
+    this.rmUrl = rmUrl;
+  }
+
+  @Override
+  public String dagProgressUrl(String appId, String dagIdx) {
+    return rmUrl + String.format("/proxy/%s/ws/v1/tez/dagProgress?dagID=%s", appId, dagIdx);
+  }
+
+  @Override
+  public String verticesProgressUrl(String appId, String dagIdx, String vertices) {
+    return rmUrl + String.format("/proxy/%s/ws/v1/tez/vertexProgresses?dagID=%s&vertexID=%s", appId, dagIdx, vertices);
+  }
+
+  @Override
+  public JSONObject dagProgress(String appId, String dagIdx) {
+    String url = dagProgressUrl(appId, dagIdx);
+    String response;
+    try {
+      InputStream responseInputStream = context.getURLStreamProvider().readFrom(url, "GET",
+          (String)null, new HashMap<String, String>());
+      response = IOUtils.toString(responseInputStream);
+    } catch (IOException e) {
+      throw new ServiceFormattedException(
+          String.format("R010 DAG %s in app %s not found or ResourceManager is unreachable", dagIdx, appId));
+    }
+    return (JSONObject) JSONValue.parse(response);
+  }
+
+  @Override
+  public JSONObject verticesProgress(String appId, String dagIdx, String commaSeparatedVertices) {
+    String url = verticesProgressUrl(appId, dagIdx, commaSeparatedVertices);
+    String response;
+    try {
+      InputStream responseInputStream = context.getURLStreamProvider().readFrom(url, "GET",
+          (String)null, new HashMap<String, String>());
+      response = IOUtils.toString(responseInputStream);
+    } catch (IOException e) {
+      throw new ServiceFormattedException(
+          String.format("R020 DAG %s in app %s not found or ResourceManager is unreachable", dagIdx, appId));
+    }
+    return (JSONObject) JSONValue.parse(response);
+  }
+
+  protected String readFromWithDefault(String url, String defaultResponse) {
+    String response;
+    try {
+      InputStream responseInputStream = context.getURLStreamProvider().readFrom(url, "GET",
+          (String)null, new HashMap<String, String>());
+      response = IOUtils.toString(responseInputStream);
+    } catch (IOException e) {
+      LOG.error("Error while reading from RM", e);
+      response = defaultResponse;
+    }
+    return response;
+  }
+
+}
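
For context, a minimal standalone sketch (not part of this patch) of the ResourceManager
proxy URLs that the delegate above composes; the host and the application/DAG identifiers
below are illustrative:

  public class RmUrlSketch {
    public static void main(String[] args) {
      String rmUrl = "http://rm-host:8088";                     // assumed ResourceManager address
      String appId = "application_1483433321398_0001";          // illustrative YARN application id
      String dagIdx = "dag_1483433321398_0001_1";                // illustrative Tez DAG id
      // Mirrors dagProgressUrl(appId, dagIdx)
      String dagProgress = rmUrl
          + String.format("/proxy/%s/ws/v1/tez/dagProgress?dagID=%s", appId, dagIdx);
      // Mirrors verticesProgressUrl(appId, dagIdx, vertices) for a comma-separated vertex list
      String vertexProgress = rmUrl
          + String.format("/proxy/%s/ws/v1/tez/vertexProgresses?dagID=%s&vertexID=%s",
              appId, dagIdx, "vertex_1483433321398_0001_1_00,vertex_1483433321398_0001_1_01");
      System.out.println(dagProgress);
      System.out.println(vertexProgress);
    }
  }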

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/IJobControllerFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/IJobControllerFactory.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/IJobControllerFactory.java
new file mode 100644
index 0000000..b11f124
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/IJobControllerFactory.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs.viewJobs;
+
+public interface IJobControllerFactory {
+  JobController createControllerForJob(Job job);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/Job.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/Job.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/Job.java
new file mode 100644
index 0000000..d64e354
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/Job.java
@@ -0,0 +1,131 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs.viewJobs;
+
+
+import org.apache.ambari.view.hive20.persistence.utils.Indexed;
+import org.apache.ambari.view.hive20.persistence.utils.PersonalResource;
+
+import java.io.Serializable;
+
+/**
+ * Interface for the Job bean; used so that a dynamic Proxy can be created around it
+ */
+public interface Job extends Serializable,Indexed,PersonalResource {
+  String JOB_STATE_UNKNOWN = "UNKNOWN";
+  String JOB_STATE_INITIALIZED = "INITIALIZED";
+  String JOB_STATE_RUNNING = "RUNNING";
+  String JOB_STATE_FINISHED = "SUCCEEDED";
+  String JOB_STATE_CANCELED = "CANCELED";
+  String JOB_STATE_CLOSED = "CLOSED";
+  String JOB_STATE_ERROR = "ERROR";
+  String JOB_STATE_PENDING = "PENDING";
+
+  String getId();
+
+  void setId(String id);
+
+  String getOwner();
+
+  void setOwner(String owner);
+
+  String getTitle();
+
+  void setTitle(String title);
+
+  String getQueryFile();
+
+  void setQueryFile(String queryFile);
+
+  Long getDateSubmitted();
+
+  void setDateSubmitted(Long dateSubmitted);
+
+  Long getDuration();
+
+  void setDuration(Long duration);
+
+  String getStatus();
+
+  void setStatus(String status);
+
+  String getForcedContent();
+
+  void setForcedContent(String forcedContent);
+
+  String getQueryId();
+
+  void setQueryId(String queryId);
+
+  String getStatusDir();
+
+  void setStatusDir(String statusDir);
+
+  String getDataBase();
+
+  void setDataBase(String dataBase);
+
+  String getLogFile();
+
+  void setLogFile(String logFile);
+
+  String getConfFile();
+
+  void setConfFile(String confFile);
+
+  String getApplicationId();
+
+  void setApplicationId(String applicationId);
+
+  String getDagName();
+
+  void setDagName(String dagName);
+
+  String getDagId();
+
+  void setDagId(String dagId);
+
+  String getSessionTag();
+
+  void setSessionTag(String sessionTag);
+
+  String getSqlState();
+
+  void setSqlState(String sqlState);
+
+  String getStatusMessage();
+
+  void setStatusMessage(String message);
+
+  String getReferrer();
+
+  void setReferrer(String referrer);
+
+  String getGlobalSettings();
+
+  void setGlobalSettings(String globalSettings);
+
+  String getGuid();
+
+  void setGuid(String guid);
+
+  String getHiveQueryId();
+
+  void setHiveQueryId(String hiveQueryId);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobController.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobController.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobController.java
new file mode 100644
index 0000000..e8a2fe3
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobController.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs.viewJobs;
+
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+
+public interface JobController {
+
+  void submit() throws Throwable;
+
+  void cancel() throws ItemNotFound;
+
+  Job getJob();
+
+  /**
+   * Use carefully: the returned bean is not proxied, so changes made to it bypass modification tracking.
+   * @return unproxied bean object
+   */
+  Job getJobPOJO();
+
+  void afterCreation();
+
+  void update();
+
+  boolean isModified();
+
+  void clearModified();
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobControllerFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobControllerFactory.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobControllerFactory.java
new file mode 100644
index 0000000..cc79843
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobControllerFactory.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs.viewJobs;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.utils.SharedObjectsFactory;
+
+public class JobControllerFactory implements IJobControllerFactory {
+  private SharedObjectsFactory sharedObjectsFactory;
+  private ViewContext context;
+
+  public JobControllerFactory(ViewContext context, SharedObjectsFactory sharedObjectsFactory) {
+    this.sharedObjectsFactory = sharedObjectsFactory;
+    this.context = context;
+  }
+
+  @Override
+  public JobController createControllerForJob(Job job) {
+    return new JobControllerImpl(context, job,
+        sharedObjectsFactory.getSavedQueryResourceManager(),
+        sharedObjectsFactory.getATSParser(),
+        sharedObjectsFactory.getHdfsApi());
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobControllerImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobControllerImpl.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobControllerImpl.java
new file mode 100644
index 0000000..1511e5b
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobControllerImpl.java
@@ -0,0 +1,311 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs.viewJobs;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.ConnectionFactory;
+import org.apache.ambari.view.hive20.ConnectionSystem;
+import org.apache.ambari.view.hive20.actor.message.HiveJob;
+import org.apache.ambari.view.hive20.actor.message.SQLStatementJob;
+import org.apache.ambari.view.hive20.client.AsyncJobRunner;
+import org.apache.ambari.view.hive20.client.AsyncJobRunnerImpl;
+import org.apache.ambari.view.hive20.client.ConnectionConfig;
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive20.resources.jobs.ModifyNotificationDelegate;
+import org.apache.ambari.view.hive20.resources.jobs.ModifyNotificationInvocationHandler;
+import org.apache.ambari.view.hive20.resources.jobs.atsJobs.IATSParser;
+import org.apache.ambari.view.hive20.resources.savedQueries.SavedQuery;
+import org.apache.ambari.view.hive20.resources.savedQueries.SavedQueryResourceManager;
+import org.apache.ambari.view.hive20.utils.BadRequestFormattedException;
+import org.apache.ambari.view.hive20.utils.FilePaginator;
+import org.apache.ambari.view.hive20.utils.MisconfigurationFormattedException;
+import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.ambari.view.utils.hdfs.HdfsApiException;
+import org.apache.ambari.view.utils.hdfs.HdfsUtil;
+import org.apache.commons.lang3.ArrayUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.lang.reflect.Proxy;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+public class JobControllerImpl implements JobController, ModifyNotificationDelegate {
+    private final static Logger LOG =
+            LoggerFactory.getLogger(JobControllerImpl.class);
+
+    private ViewContext context;
+    private HdfsApi hdfsApi;
+    private Job jobUnproxied;
+    private Job job;
+    private boolean modified;
+
+    private SavedQueryResourceManager savedQueryResourceManager;
+    private IATSParser atsParser;
+
+    /**
+     * JobController constructor
+     * Warning: Create JobControllers ONLY using JobControllerFactory!
+     */
+    public JobControllerImpl(ViewContext context, Job job,
+                             SavedQueryResourceManager savedQueryResourceManager,
+                             IATSParser atsParser,
+                             HdfsApi hdfsApi) {
+        this.context = context;
+        setJobPOJO(job);
+        this.savedQueryResourceManager = savedQueryResourceManager;
+        this.atsParser = atsParser;
+        this.hdfsApi = hdfsApi;
+
+    }
+
+    public String getQueryForJob() {
+        FilePaginator paginator = new FilePaginator(job.getQueryFile(), hdfsApi);
+        String query;
+        try {
+            query = paginator.readPage(0);  // warning: reading only page 0 limits the query size to 1 MB
+        } catch (IOException e) {
+            throw new ServiceFormattedException("F030 Error when reading file " + job.getQueryFile(), e);
+        } catch (InterruptedException e) {
+            throw new ServiceFormattedException("F030 Error when reading file " + job.getQueryFile(), e);
+        }
+        return query;
+    }
+
+    private static final String DEFAULT_DB = "default";
+
+    public String getJobDatabase() {
+        if (job.getDataBase() != null) {
+            return job.getDataBase();
+        } else {
+            return DEFAULT_DB;
+        }
+    }
+
+
+    @Override
+    public void submit() throws Throwable {
+        String jobDatabase = getJobDatabase();
+        String query = getQueryForJob();
+        ConnectionSystem system = ConnectionSystem.getInstance();
+        AsyncJobRunner asyncJobRunner = new AsyncJobRunnerImpl(context, system.getOperationController(context), system.getActorSystem());
+        SQLStatementJob asyncJob = new SQLStatementJob(HiveJob.Type.ASYNC, getStatements(jobDatabase, query), context.getUsername(), job.getId(), job.getLogFile());
+        asyncJobRunner.submitJob(getHiveConnectionConfig(), asyncJob, job);
+
+    }
+
+    private String[] getStatements(String jobDatabase, String query) {
+      String[] queries = query.split(";");
+
+
+      String[] strings = {"use " + jobDatabase};
+      return ArrayUtils.addAll(strings, queries);
+    }
+
+
+    @Override
+    public void cancel() throws ItemNotFound {
+      ConnectionSystem system = ConnectionSystem.getInstance();
+      AsyncJobRunner asyncJobRunner = new AsyncJobRunnerImpl(context, system.getOperationController(context), system.getActorSystem());
+      asyncJobRunner.cancelJob(job.getId(), context.getUsername());
+    }
+
+    @Override
+    public void update() {
+        updateJobDuration();
+    }
+
+
+    @Override
+    public Job getJob() {
+        return job;
+    }
+
+    /**
+     * Use carefully: the returned bean is not proxied, so changes made to it bypass modification tracking.
+     * @return unproxied bean object
+     */
+    @Override
+    public Job getJobPOJO() {
+        return jobUnproxied;
+    }
+
+    public void setJobPOJO(Job jobPOJO) {
+        Job jobModifyNotificationProxy = (Job) Proxy.newProxyInstance(jobPOJO.getClass().getClassLoader(),
+                new Class[]{Job.class},
+                new ModifyNotificationInvocationHandler(jobPOJO, this));
+        this.job = jobModifyNotificationProxy;
+
+        this.jobUnproxied = jobPOJO;
+    }
+
+
+    @Override
+    public void afterCreation() {
+        setupStatusDirIfNotPresent();
+        setupQueryFileIfNotPresent();
+        setupLogFileIfNotPresent();
+
+        setCreationDate();
+    }
+
+    public void setupLogFileIfNotPresent() {
+        if (job.getLogFile() == null || job.getLogFile().isEmpty()) {
+            setupLogFile();
+        }
+    }
+
+    public void setupQueryFileIfNotPresent() {
+        if (job.getQueryFile() == null || job.getQueryFile().isEmpty()) {
+            setupQueryFile();
+        }
+    }
+
+    public void setupStatusDirIfNotPresent() {
+        if (job.getStatusDir() == null || job.getStatusDir().isEmpty()) {
+            setupStatusDir();
+        }
+    }
+
+    private static final long MILLIS_IN_SECOND = 1000L;
+
+    public void updateJobDuration() {
+        job.setDuration((System.currentTimeMillis() / MILLIS_IN_SECOND) - (job.getDateSubmitted() / MILLIS_IN_SECOND));
+    }
+
+    public void setCreationDate() {
+        job.setDateSubmitted(System.currentTimeMillis());
+    }
+
+    private void setupLogFile() {
+        LOG.debug("Creating log file for job#" + job.getId());
+
+        String logFile = job.getStatusDir() + "/" + "logs";
+        try {
+            HdfsUtil.putStringToFile(hdfsApi, logFile, "");
+        } catch (HdfsApiException e) {
+            throw new ServiceFormattedException(e);
+        }
+
+        job.setLogFile(logFile);
+        LOG.debug("Log file for job#" + job.getId() + ": " + logFile);
+    }
+
+    private void setupStatusDir() {
+        String newDirPrefix = makeStatusDirectoryPrefix();
+        String newDir = null;
+        try {
+            newDir = HdfsUtil.findUnallocatedFileName(hdfsApi, newDirPrefix, "");
+        } catch (HdfsApiException e) {
+            throw new ServiceFormattedException(e);
+        }
+
+        job.setStatusDir(newDir);
+        LOG.debug("Status dir for job#" + job.getId() + ": " + newDir);
+    }
+
+    private String makeStatusDirectoryPrefix() {
+        String userScriptsPath = context.getProperties().get("jobs.dir");
+
+        if (userScriptsPath == null) { // TODO: move check to initialization code
+            String msg = "jobs.dir is not configured!";
+            LOG.error(msg);
+            throw new MisconfigurationFormattedException("jobs.dir");
+        }
+
+        String normalizedName = String.format("hive-job-%s", job.getId());
+        String timestamp = new SimpleDateFormat("yyyy-MM-dd_hh-mm").format(new Date());
+        return String.format(userScriptsPath +
+                "/%s-%s", normalizedName, timestamp);
+    }
+
+    private void setupQueryFile() {
+        String statusDir = job.getStatusDir();
+        assert statusDir != null : "setupStatusDir() should be called first";
+
+        String jobQueryFilePath = statusDir + "/" + "query.hql";
+
+        try {
+
+            if (job.getForcedContent() != null) {
+
+                HdfsUtil.putStringToFile(hdfsApi, jobQueryFilePath, job.getForcedContent());
+                job.setForcedContent("");  // prevent forcedContent to be written to DB
+
+            } else if (job.getQueryId() != null) {
+
+                String savedQueryFile = getRelatedSavedQueryFile();
+                hdfsApi.copy(savedQueryFile, jobQueryFilePath);
+                job.setQueryFile(jobQueryFilePath);
+
+            } else {
+
+                throw new BadRequestFormattedException("queryId or forcedContent should be passed!", null);
+
+            }
+
+        } catch (IOException e) {
+            throw new ServiceFormattedException("F040 Error when creating file " + jobQueryFilePath, e);
+        } catch (InterruptedException e) {
+            throw new ServiceFormattedException("F040 Error when creating file " + jobQueryFilePath, e);
+        } catch (HdfsApiException e) {
+            throw new ServiceFormattedException(e);
+        }
+        job.setQueryFile(jobQueryFilePath);
+
+        LOG.debug("Query file for job#" + job.getId() + ": " + jobQueryFilePath);
+    }
+
+
+    private ConnectionConfig getHiveConnectionConfig() {
+        return ConnectionFactory.create(context);
+    }
+
+    private String getRelatedSavedQueryFile() {
+        SavedQuery savedQuery;
+        try {
+            savedQuery = savedQueryResourceManager.read(job.getQueryId());
+        } catch (ItemNotFound itemNotFound) {
+            throw new BadRequestFormattedException("queryId not found!", itemNotFound);
+        }
+        return savedQuery.getQueryFile();
+    }
+
+    @Override
+    public boolean onModification(Object object) {
+        setModified(true);
+        return true;
+    }
+
+    @Override
+    public boolean isModified() {
+        return modified;
+    }
+
+    public void setModified(boolean modified) {
+        this.modified = modified;
+    }
+
+    @Override
+    public void clearModified() {
+        setModified(false);
+    }
+}
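
A compressed sketch (not part of this patch) of how a controller built by JobControllerFactory
is driven; in the view these calls are split between JobResourceManager.create() and the code
that actually runs the job, and the ViewContext and SharedObjectsFactory are assumed to be
provided by the hosting view:

  import org.apache.ambari.view.ViewContext;
  import org.apache.ambari.view.hive20.resources.jobs.viewJobs.IJobControllerFactory;
  import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
  import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobController;
  import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobControllerFactory;
  import org.apache.ambari.view.hive20.utils.SharedObjectsFactory;

  public class JobLifecycleSketch {
    static void runJob(ViewContext context, SharedObjectsFactory sharedObjectsFactory, Job job)
        throws Throwable {
      IJobControllerFactory factory = new JobControllerFactory(context, sharedObjectsFactory);
      JobController controller = factory.createControllerForJob(job);
      controller.afterCreation();  // provisions status dir, query file, log file; sets submission date
      controller.submit();         // prepends "use <database>" and submits the statements asynchronously
      controller.update();         // refreshes the job duration
    }
  }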

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobImpl.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobImpl.java
new file mode 100644
index 0000000..85ffaf2
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobImpl.java
@@ -0,0 +1,335 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs.viewJobs;
+
+import org.apache.commons.beanutils.PropertyUtils;
+
+import java.beans.Transient;
+import java.lang.reflect.InvocationTargetException;
+import java.util.Map;
+
+/**
+ * Bean to represent a job
+ */
+public class JobImpl implements Job {
+  private String title = null;
+  private String queryFile = null;
+  private String statusDir = null;
+  private Long dateSubmitted = 0L;
+  private Long duration = 0L;
+  private String forcedContent = null;
+  private String dataBase = null;
+  private String queryId = null;
+
+  private String status = JOB_STATE_UNKNOWN;
+  private String statusMessage = null;
+  private String sqlState = null;
+
+  private String applicationId;
+  private String dagId;
+  private String dagName;
+
+  private String sessionTag;
+  private String referrer;
+  private String globalSettings;
+
+  private String id = null;
+  private String owner = null;
+
+  private String logFile;
+  private String confFile;
+
+  private String guid = null;
+
+  private String hiveQueryId;
+
+  public JobImpl() {}
+  public JobImpl(Map<String, Object> stringObjectMap) throws InvocationTargetException, IllegalAccessException {
+    for (Map.Entry<String, Object> entry : stringObjectMap.entrySet())  {
+      try {
+        PropertyUtils.setProperty(this, entry.getKey(), entry.getValue());
+      } catch (NoSuchMethodException e) {
+        //do nothing, skip
+      }
+    }
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (!(o instanceof Job)) return false;
+
+    JobImpl job = (JobImpl) o;
+
+    return id != null ? id.equals(job.id) : job.id == null;
+
+  }
+
+  @Override
+  public int hashCode() {
+    return id != null ? id.hashCode() : 0;
+  }
+
+  @Override
+  @Transient
+  public String getHiveQueryId() {
+    return hiveQueryId;
+  }
+
+  @Override
+  @Transient
+  public void setHiveQueryId(String hiveQueryId) {
+    this.hiveQueryId = hiveQueryId;
+  }
+
+  @Override
+  public String getId() {
+    return id;
+  }
+
+  @Override
+  public void setId(String id) {
+    this.id = id;
+  }
+
+  @Override
+  public String getOwner() {
+    return owner;
+  }
+
+  @Override
+  public void setOwner(String owner) {
+    this.owner = owner;
+  }
+
+  @Override
+  public String getTitle() {
+    return title;
+  }
+
+  @Override
+  public void setTitle(String title) {
+    this.title = title;
+  }
+
+  @Override
+  public String getQueryFile() {
+    return queryFile;
+  }
+
+  @Override
+  public void setQueryFile(String queryFile) {
+    this.queryFile = queryFile;
+  }
+
+  @Override
+  public Long getDateSubmitted() {
+    return dateSubmitted;
+  }
+
+  @Override
+  public void setDateSubmitted(Long dateSubmitted) {
+    this.dateSubmitted = dateSubmitted;
+  }
+
+  @Override
+  public Long getDuration() {
+    return duration;
+  }
+
+  @Override
+  public void setDuration(Long duration) {
+    this.duration = duration;
+  }
+
+  @Override
+  public String getStatus() {
+    return status;
+  }
+
+  @Override
+  public void setStatus(String status) {
+    this.status = status;
+  }
+
+  @Override
+  @Transient
+  public String getForcedContent() {
+    return forcedContent;
+  }
+
+  @Override
+  @Transient
+  public void setForcedContent(String forcedContent) {
+    this.forcedContent = forcedContent;
+  }
+
+  @Override
+  public String getQueryId() {
+    return queryId;
+  }
+
+  @Override
+  public void setQueryId(String queryId) {
+    this.queryId = queryId;
+  }
+
+  @Override
+  public String getStatusDir() {
+    return statusDir;
+  }
+
+  @Override
+  public void setStatusDir(String statusDir) {
+    this.statusDir = statusDir;
+  }
+
+  @Override
+  public String getDataBase() {
+    return dataBase;
+  }
+
+  @Override
+  public void setDataBase(String dataBase) {
+    this.dataBase = dataBase;
+  }
+
+  @Override
+  public String getLogFile() {
+    return logFile;
+  }
+
+  @Override
+  public void setLogFile(String logFile) {
+    this.logFile = logFile;
+  }
+
+  @Override
+  public String getConfFile() {
+    return confFile;
+  }
+
+  @Override
+  public void setConfFile(String confFile) {
+    this.confFile = confFile;
+  }
+
+  @Override
+  public String getApplicationId() {
+    return applicationId;
+  }
+
+  @Override
+  public void setApplicationId(String applicationId) {
+    this.applicationId = applicationId;
+  }
+
+  @Override
+  public String getDagName() {
+    return dagName;
+  }
+
+  @Override
+  public void setDagName(String dagName) {
+    this.dagName = dagName;
+  }
+
+  @Override
+  public String getDagId() {
+    return dagId;
+  }
+
+  @Override
+  public void setDagId(String dagId) {
+    this.dagId = dagId;
+  }
+
+  @Override
+  public String getSessionTag() {
+    return sessionTag;
+  }
+
+  @Override
+  public void setSessionTag(String sessionTag) {
+    this.sessionTag = sessionTag;
+  }
+
+  @Override
+  @Transient
+  public String getStatusMessage() {
+    return statusMessage;
+  }
+
+  @Override
+  @Transient
+  public void setStatusMessage(String statusMessage) {
+    this.statusMessage = statusMessage;
+  }
+
+  @Override
+  public String getSqlState() {
+    return sqlState;
+  }
+
+  @Override
+  public void setSqlState(String sqlState) {
+    this.sqlState = sqlState;
+  }
+
+  @Override
+  public String getReferrer() {
+    return referrer;
+  }
+
+  @Override
+  public void setReferrer(String referrer) {
+    this.referrer = referrer;
+  }
+
+  @Override
+  public String getGlobalSettings() {
+    return globalSettings;
+  }
+
+  @Override
+  public void setGlobalSettings(String globalSettings) {
+    this.globalSettings = globalSettings;
+  }
+
+  @Override
+  public String getGuid() {
+    return guid;
+  }
+
+  @Override
+  public void setGuid(String guid) {
+    this.guid = guid;
+  }
+
+  @Override
+  public String toString() {
+    return new StringBuilder("JobImpl{")
+      .append("id='").append(id)
+      .append(", owner='").append(owner)
+      .append(", hiveQueryId='").append(hiveQueryId)
+      .append(", dagId='").append(dagId)
+      .append(", queryId='").append(queryId)
+      .append('}').toString();
+  }
+}
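
A minimal sketch (not part of this patch) of populating the bean from a property map, which is
what the Map constructor above does via PropertyUtils; the property values are illustrative:

  import java.util.HashMap;
  import java.util.Map;
  import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
  import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobImpl;

  public class JobBeanSketch {
    public static void main(String[] args) throws Exception {
      Map<String, Object> properties = new HashMap<>();
      properties.put("title", "count rows");                          // illustrative title
      properties.put("dataBase", "default");
      properties.put("forcedContent", "SELECT COUNT(*) FROM sample"); // illustrative query text
      Job job = new JobImpl(properties);  // keys without a matching setter are silently skipped
      System.out.println(job);            // prints the JobImpl{...} summary from toString()
    }
  }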

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobInfo.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobInfo.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobInfo.java
new file mode 100644
index 0000000..e4ddc72
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobInfo.java
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs.viewJobs;
+
+public class JobInfo {
+  private String jobId;
+  private String hiveId;
+  private String dagId;
+  private String operationId;
+
+  public JobInfo() {
+  }
+
+  public JobInfo(String jobId, String hiveId, String dagId, String operationId) {
+    this.jobId = jobId;
+    this.hiveId = hiveId;
+    this.dagId = dagId;
+    this.operationId = operationId;
+  }
+
+  public String getJobId() {
+    return jobId;
+  }
+
+  public void setJobId(String jobId) {
+    this.jobId = jobId;
+  }
+
+  public String getHiveId() {
+    return hiveId;
+  }
+
+  public void setHiveId(String hiveId) {
+    this.hiveId = hiveId;
+  }
+
+  public String getDagId() {
+    return dagId;
+  }
+
+  public void setDagId(String dagId) {
+    this.dagId = dagId;
+  }
+
+  public String getOperationId() {
+    return operationId;
+  }
+
+  public void setOperationId(String operationId) {
+    this.operationId = operationId;
+  }
+
+  @Override
+  public String toString() {
+    return new StringBuilder().append("JobInfo{")
+      .append("jobId=").append(jobId)
+      .append(", hiveId=").append(hiveId)
+      .append(", dagId=").append(dagId)
+      .append(", operationId=").append(operationId)
+      .append('}').toString();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobResourceManager.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobResourceManager.java
new file mode 100644
index 0000000..1bfdcaa
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobResourceManager.java
@@ -0,0 +1,93 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs.viewJobs;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.persistence.utils.FilteringStrategy;
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive20.resources.PersonalCRUDResourceManager;
+import org.apache.ambari.view.hive20.utils.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.*;
+
+/**
+ * Object that provides CRUD operations for job objects
+ */
+public class JobResourceManager extends PersonalCRUDResourceManager<Job> {
+  private final static Logger LOG =
+      LoggerFactory.getLogger(JobResourceManager.class);
+
+  private IJobControllerFactory jobControllerFactory;
+
+  /**
+   * Constructor
+   * @param context View Context instance
+   */
+  public JobResourceManager(SharedObjectsFactory sharedObjectsFactory, ViewContext context) {
+    super(JobImpl.class, sharedObjectsFactory, context);
+    jobControllerFactory = sharedObjectsFactory.getJobControllerFactory();
+  }
+
+  @Override
+  public Job create(Job object) {
+    super.create(object);
+    JobController jobController = jobControllerFactory.createControllerForJob(object);
+
+    try {
+
+      jobController.afterCreation();
+      saveIfModified(jobController);
+
+    } catch (ServiceFormattedException e) {
+      cleanupAfterErrorAndThrowAgain(object, e);
+    }
+
+    return object;
+  }
+
+  public void saveIfModified(JobController jobController) {
+    if (jobController.isModified()) {
+      save(jobController.getJobPOJO());
+      jobController.clearModified();
+    }
+  }
+
+
+  @Override
+  public Job read(Object id) throws ItemNotFound {
+    return super.read(id);
+  }
+
+  @Override
+  public List<Job> readAll(FilteringStrategy filteringStrategy) {
+    return super.readAll(filteringStrategy);
+  }
+
+  @Override
+  public void delete(Object resourceId) throws ItemNotFound {
+    super.delete(resourceId);
+  }
+
+  public JobController readController(Object id) throws ItemNotFound {
+    Job job = read(id);
+    return jobControllerFactory.createControllerForJob(job);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/resources/FileResourceItem.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/resources/FileResourceItem.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/resources/FileResourceItem.java
new file mode 100644
index 0000000..f9478be
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/resources/FileResourceItem.java
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.resources;
+
+import org.apache.ambari.view.hive20.persistence.utils.PersonalResource;
+import org.apache.commons.beanutils.BeanUtils;
+
+import java.io.Serializable;
+import java.lang.reflect.InvocationTargetException;
+import java.util.Map;
+
+/**
+ * Bean to represent file resource
+ */
+public class FileResourceItem implements Serializable, PersonalResource {
+  private String name;
+  private String path;
+
+  private String id;
+  private String owner;
+
+  public FileResourceItem() {}
+  public FileResourceItem(Map<String, Object> stringObjectMap) throws InvocationTargetException, IllegalAccessException {
+    BeanUtils.populate(this, stringObjectMap);
+  }
+
+  @Override
+  public String getId() {
+    return id;
+  }
+
+  @Override
+  public void setId(String id) {
+    this.id = id;
+  }
+
+  @Override
+  public String getOwner() {
+    return owner;
+  }
+
+  @Override
+  public void setOwner(String owner) {
+    this.owner = owner;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  public String getPath() {
+    return path;
+  }
+
+  public void setPath(String path) {
+    this.path = path;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/resources/FileResourceResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/resources/FileResourceResourceManager.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/resources/FileResourceResourceManager.java
new file mode 100644
index 0000000..bc52158
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/resources/FileResourceResourceManager.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.resources;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.persistence.IStorageFactory;
+import org.apache.ambari.view.hive20.persistence.utils.FilteringStrategy;
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive20.resources.PersonalCRUDResourceManager;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+
+/**
+ * Object that provides CRUD operations for resource objects
+ */
+public class FileResourceResourceManager extends PersonalCRUDResourceManager<FileResourceItem> {
+  private final static Logger LOG =
+      LoggerFactory.getLogger(FileResourceResourceManager.class);
+
+  /**
+   * Constructor
+   * @param context View Context instance
+   */
+  public FileResourceResourceManager(IStorageFactory storageFactory, ViewContext context) {
+    super(FileResourceItem.class, storageFactory, context);
+  }
+
+  @Override
+  public FileResourceItem create(FileResourceItem object) {
+    return super.create(object);
+  }
+
+  @Override
+  public FileResourceItem read(Object id) throws ItemNotFound {
+    return super.read(id);
+  }
+
+  @Override
+  public void delete(Object resourceId) throws ItemNotFound {
+    super.delete(resourceId);
+  }
+
+  @Override
+  public List<FileResourceItem> readAll(FilteringStrategy filteringStrategy) {
+    return super.readAll(filteringStrategy);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/resources/FileResourceResourceProvider.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/resources/FileResourceResourceProvider.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/resources/FileResourceResourceProvider.java
new file mode 100644
index 0000000..411099f
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/resources/FileResourceResourceProvider.java
@@ -0,0 +1,110 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.resources;
+
+import org.apache.ambari.view.*;
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive20.persistence.utils.OnlyOwnersFilteringStrategy;
+import org.apache.ambari.view.hive20.utils.SharedObjectsFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import java.lang.reflect.InvocationTargetException;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Resource provider for resource
+ */
+public class FileResourceResourceProvider implements ResourceProvider<FileResourceItem> {
+  @Inject
+  ViewContext context;
+
+  protected FileResourceResourceManager resourceManager = null;
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(FileResourceResourceProvider.class);
+
+  protected synchronized FileResourceResourceManager getResourceManager() {
+    if (resourceManager == null) {
+      resourceManager = new FileResourceResourceManager(new SharedObjectsFactory(context), context);
+    }
+    return resourceManager;
+  }
+
+  @Override
+  public FileResourceItem getResource(String resourceId, Set<String> properties) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
+    try {
+      return getResourceManager().read(resourceId);
+    } catch (ItemNotFound itemNotFound) {
+      throw new NoSuchResourceException(resourceId);
+    }
+  }
+
+  @Override
+  public Set<FileResourceItem> getResources(ReadRequest readRequest) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
+    if (context == null) {
+      return new HashSet<FileResourceItem>();
+    }
+    return new HashSet<FileResourceItem>(getResourceManager().readAll(
+        new OnlyOwnersFilteringStrategy(this.context.getUsername())));
+  }
+
+  @Override
+  public void createResource(String s, Map<String, Object> stringObjectMap) throws SystemException, ResourceAlreadyExistsException, NoSuchResourceException, UnsupportedPropertyException {
+    FileResourceItem item = null;
+    try {
+      item = new FileResourceItem(stringObjectMap);
+    } catch (InvocationTargetException e) {
+      throw new SystemException("error on creating resource", e);
+    } catch (IllegalAccessException e) {
+      throw new SystemException("error on creating resource", e);
+    }
+    getResourceManager().create(item);
+  }
+
+  @Override
+  public boolean updateResource(String resourceId, Map<String, Object> stringObjectMap) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
+    FileResourceItem item = null;
+    try {
+      item = new FileResourceItem(stringObjectMap);
+    } catch (InvocationTargetException e) {
+      throw new SystemException("error on updating resource", e);
+    } catch (IllegalAccessException e) {
+      throw new SystemException("error on updating resource", e);
+    }
+    try {
+      getResourceManager().update(item, resourceId);
+    } catch (ItemNotFound itemNotFound) {
+      throw new NoSuchResourceException(resourceId);
+    }
+    return true;
+  }
+
+  @Override
+  public boolean deleteResource(String resourceId) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
+    try {
+      getResourceManager().delete(resourceId);
+    } catch (ItemNotFound itemNotFound) {
+      throw new NoSuchResourceException(resourceId);
+    }
+    return true;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/resources/FileResourceService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/resources/FileResourceService.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/resources/FileResourceService.java
new file mode 100644
index 0000000..3ed8d17
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/resources/FileResourceService.java
@@ -0,0 +1,180 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.resources;
+
+import org.apache.ambari.view.ViewResourceHandler;
+import org.apache.ambari.view.hive20.BaseService;
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive20.persistence.utils.OnlyOwnersFilteringStrategy;
+import org.apache.ambari.view.hive20.utils.NotFoundFormattedException;
+import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.*;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+import java.util.List;
+
+/**
+ * Servlet for Resources
+ * API:
+ * GET /:id
+ *      read resource
+ * POST /
+ *      create new resource
+ * GET /
+ *      get all resources of the current user
+ */
+public class FileResourceService extends BaseService {
+  @Inject
+  ViewResourceHandler handler;
+
+  protected FileResourceResourceManager resourceManager = null;
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(FileResourceService.class);
+
+  protected synchronized FileResourceResourceManager getResourceManager() {
+    if (resourceManager == null) {
+      resourceManager = new FileResourceResourceManager(getSharedObjectsFactory(), context);
+    }
+    return resourceManager;
+  }
+
+  /**
+   * Get single item
+   */
+  @GET
+  @Path("{id}")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getOne(@PathParam("id") String id) {
+    try {
+      FileResourceItem fileResourceItem = getResourceManager().read(id);
+      JSONObject object = new JSONObject();
+      object.put("fileResource", fileResourceItem);
+      return Response.ok(object).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (ItemNotFound itemNotFound) {
+      throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Delete single item
+   */
+  @DELETE
+  @Path("{id}")
+  public Response delete(@PathParam("id") String id) {
+    try {
+      getResourceManager().delete(id);
+      return Response.status(204).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (ItemNotFound itemNotFound) {
+      throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Get all resources
+   */
+  @GET
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getList() {
+    try {
+      LOG.debug("Getting all resources");
+      List items = getResourceManager().readAll(
+          new OnlyOwnersFilteringStrategy(this.context.getUsername()));  //TODO: move strategy to PersonalCRUDRM
+
+      JSONObject object = new JSONObject();
+      object.put("fileResources", items);
+      return Response.ok(object).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Update item
+   */
+  @PUT
+  @Path("{id}")
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response update(ResourceRequest request,
+                         @PathParam("id") String id) {
+    try {
+      getResourceManager().update(request.fileResource, id);
+      return Response.status(204).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (ItemNotFound itemNotFound) {
+      throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Create resource
+   */
+  @POST
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response create(ResourceRequest request, @Context HttpServletResponse response,
+                         @Context UriInfo ui) {
+    try {
+      getResourceManager().create(request.fileResource);
+
+      FileResourceItem item = null;
+
+      item = getResourceManager().read(request.fileResource.getId());
+
+      response.setHeader("Location",
+          String.format("%s/%s", ui.getAbsolutePath().toString(), request.fileResource.getId()));
+
+      JSONObject object = new JSONObject();
+      object.put("fileResource", item);
+      return Response.ok(object).status(201).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (ItemNotFound itemNotFound) {
+      throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Wrapper object for json mapping
+   */
+  public static class ResourceRequest {
+    public FileResourceItem fileResource;
+  }
+}
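
A minimal sketch (not part of this patch) of the wrapper payload this service exchanges, matching
the ResourceRequest class above; the field values are illustrative, and json-simple is used only
because the service itself builds its responses with it:

  import org.json.simple.JSONObject;

  public class FileResourcePayloadSketch {
    public static void main(String[] args) {
      JSONObject fileResource = new JSONObject();
      fileResource.put("name", "shared-udfs");            // illustrative resource name
      fileResource.put("path", "/user/admin/udfs.jar");   // illustrative HDFS path
      JSONObject wrapper = new JSONObject();
      wrapper.put("fileResource", fileResource);          // matches ResourceRequest.fileResource
      System.out.println(wrapper.toJSONString());         // body shape for POST / and PUT /{id}
    }
  }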

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/savedQueries/SavedQuery.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/savedQueries/SavedQuery.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/savedQueries/SavedQuery.java
new file mode 100644
index 0000000..cbb4e4a
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/savedQueries/SavedQuery.java
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.savedQueries;
+
+import org.apache.ambari.view.hive20.persistence.utils.PersonalResource;
+import org.apache.commons.beanutils.BeanUtils;
+
+import java.io.Serializable;
+import java.lang.reflect.InvocationTargetException;
+import java.util.Map;
+
+/**
+ * Bean to represent saved query
+ */
+public class SavedQuery implements Serializable, PersonalResource {
+  private String queryFile;
+  private String dataBase;
+  private String title;
+  private String shortQuery;
+
+  private String id;
+  private String owner;
+
+  public SavedQuery() {}
+  public SavedQuery(Map<String, Object> stringObjectMap) throws InvocationTargetException, IllegalAccessException {
+    BeanUtils.populate(this, stringObjectMap);
+  }
+
+  @Override
+  public String getId() {
+    return id;
+  }
+
+  @Override
+  public void setId(String id) {
+    this.id = id;
+  }
+
+  @Override
+  public String getOwner() {
+    return owner;
+  }
+
+  @Override
+  public void setOwner(String owner) {
+    this.owner = owner;
+  }
+
+  public String getQueryFile() {
+    return queryFile;
+  }
+
+  public void setQueryFile(String queryFile) {
+    this.queryFile = queryFile;
+  }
+
+  public String getDataBase() {
+    return dataBase;
+  }
+
+  public void setDataBase(String dataBase) {
+    this.dataBase = dataBase;
+  }
+
+  public String getTitle() {
+    return title;
+  }
+
+  public void setTitle(String title) {
+    this.title = title;
+  }
+
+  public String getShortQuery() {
+    return shortQuery;
+  }
+
+  public void setShortQuery(String shortQuery) {
+    this.shortQuery = shortQuery;
+  }
+}

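A minimal usage sketch for the bean above (not part of the diff): the map-based constructor delegates to BeanUtils.populate, so the map keys must match the bean property names. The field values below are purely illustrative.

import org.apache.ambari.view.hive20.resources.savedQueries.SavedQuery;

import java.util.HashMap;
import java.util.Map;

public class SavedQueryPopulateSketch {
  public static void main(String[] args) throws Exception {
    Map<String, Object> fields = new HashMap<>();
    fields.put("title", "Top rows");                       // setTitle
    fields.put("dataBase", "default");                     // setDataBase
    fields.put("shortQuery", "SELECT * FROM t LIMIT 10");  // setShortQuery

    SavedQuery query = new SavedQuery(fields);             // BeanUtils.populate under the hood
    System.out.println(query.getTitle() + " on " + query.getDataBase());
  }
}
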

[14/20] ambari git commit: AMBARI-19321 : Hive View 2.0 - Minimal view for Hive which includes new UI changes. Also made changes in poms as required (nitirajrathore)

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/Storage.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/Storage.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/Storage.java
new file mode 100644
index 0000000..624829b
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/Storage.java
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.persistence;
+
+import org.apache.ambari.view.hive20.persistence.utils.FilteringStrategy;
+import org.apache.ambari.view.hive20.persistence.utils.Indexed;
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+
+import java.util.List;
+
+/**
+ * Object storage interface
+ */
+public interface Storage {
+  /**
+   * Persist an object to the DB. The object must be Indexed.
+   * @param model bean class
+   * @param obj object to save
+   */
+  <T extends Indexed> void store(Class<T> model, Indexed obj);
+
+  /**
+   * Load object
+   * @param model bean class
+   * @param id identifier
+   * @return bean instance
+   * @throws ItemNotFound thrown if item with id was not found in DB
+   */
+  <T extends Indexed> T load(Class<T> model, Object id) throws ItemNotFound;
+
+  /**
+   * Load all objects of given bean class
+   * @param model bean class
+   * @param filter filtering strategy (returns only those objects that satisfy the condition)
+   * @param <T> bean class
+   * @return list of filtered objects
+   */
+  <T extends Indexed> List<T> loadAll(Class<? extends T> model, FilteringStrategy filter);
+
+  /**
+   * Load all objects of given bean class
+   * @param model bean class
+   * @param <T> bean class
+   * @return list of all objects
+   */
+  <T extends Indexed> List<T> loadAll(Class<T> model);
+
+  /**
+   * Delete object
+   * @param model bean class
+   * @param id identifier
+   */
+  void delete(Class model, Object id) throws ItemNotFound;
+
+  /**
+   * Check if an object exists
+   * @param model bean class
+   * @param id identifier
+   * @return true if exists
+   */
+  boolean exists(Class model, Object id);
+}

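A hedged usage sketch of the Storage contract (not part of the diff; it assumes a ViewContext supplied by the view framework, the StorageFactory and SavedQuery classes added elsewhere in this commit, and that the concrete storage assigns an id when the object is stored):

import org.apache.ambari.view.ViewContext;
import org.apache.ambari.view.hive20.persistence.Storage;
import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
import org.apache.ambari.view.hive20.persistence.utils.StorageFactory;
import org.apache.ambari.view.hive20.resources.savedQueries.SavedQuery;

public class StorageRoundTripSketch {
  public void roundTrip(ViewContext context) throws ItemNotFound {
    Storage storage = new StorageFactory(context).getStorage();

    SavedQuery query = new SavedQuery();
    query.setTitle("Sample");
    query.setOwner(context.getUsername());
    storage.store(SavedQuery.class, query);                  // persist the Indexed bean

    if (storage.exists(SavedQuery.class, query.getId())) {   // assumes the id was assigned on store
      SavedQuery loaded = storage.load(SavedQuery.class, query.getId());
      storage.delete(SavedQuery.class, loaded.getId());
    }
  }
}
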
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/ContextConfigurationAdapter.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/ContextConfigurationAdapter.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/ContextConfigurationAdapter.java
new file mode 100644
index 0000000..be69f82
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/ContextConfigurationAdapter.java
@@ -0,0 +1,260 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.persistence.utils;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.commons.configuration.Configuration;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+/**
+ * Persistence API to Apache Configuration adapter
+ */
+@Deprecated
+public class ContextConfigurationAdapter implements Configuration {
+  private ViewContext context;
+
+  /**
+   * Constructor of adapter
+   * @param context View Context
+   */
+  public ContextConfigurationAdapter(ViewContext context) {
+    this.context = context;
+  }
+
+  @Override
+  public Configuration subset(String prefix) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public boolean isEmpty() {
+    return context.getInstanceData().isEmpty();
+  }
+
+  @Override
+  public boolean containsKey(String s) {
+    Map<String, String> data = context.getInstanceData();
+    return data.containsKey(s);
+  }
+
+  @Override
+  public void addProperty(String s, Object o) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public void setProperty(String s, Object o) {
+    context.putInstanceData(s, o.toString());
+  }
+
+  @Override
+  public void clearProperty(String key) {
+    context.removeInstanceData(key);
+  }
+
+  @Override
+  public void clear() {
+    for (String key : context.getInstanceData().keySet()) {
+      context.removeInstanceData(key);
+    }
+  }
+
+  @Override
+  public Object getProperty(String key) {
+    return context.getInstanceData(key);
+  }
+
+  @Override
+  public Iterator getKeys(String s) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public Iterator getKeys() {
+    return context.getInstanceData().keySet().iterator();
+  }
+
+  @Override
+  public Properties getProperties(String s) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public boolean getBoolean(String s) {
+    return getBoolean(s, null);
+  }
+
+  @Override
+  public boolean getBoolean(String s, boolean b) {
+    return getBoolean(s, (Boolean)b);
+  }
+
+  @Override
+  public Boolean getBoolean(String s, Boolean aBoolean) {
+    String data = context.getInstanceData(s);
+    return (data != null)?Boolean.parseBoolean(data):aBoolean;
+  }
+
+  @Override
+  public byte getByte(String s) {
+    return getByte(s, null);
+  }
+
+  @Override
+  public byte getByte(String s, byte b) {
+    return getByte(s, (Byte)b);
+  }
+
+  @Override
+  public Byte getByte(String s, Byte aByte) {
+    String data = context.getInstanceData(s);
+    return (data != null)?Byte.parseByte(data):aByte;
+  }
+
+  @Override
+  public double getDouble(String s) {
+    return getDouble(s, null);
+  }
+
+  @Override
+  public double getDouble(String s, double v) {
+    return getDouble(s, (Double)v);
+  }
+
+  @Override
+  public Double getDouble(String s, Double aDouble) {
+    String data = context.getInstanceData(s);
+    return (data != null)?Double.parseDouble(data):aDouble;
+  }
+
+  @Override
+  public float getFloat(String s) {
+    return getFloat(s, null);
+  }
+
+  @Override
+  public float getFloat(String s, float v) {
+    return getFloat(s, (Float)v);
+  }
+
+  @Override
+  public Float getFloat(String s, Float aFloat) {
+    String data = context.getInstanceData(s);
+    return (data != null)?Float.parseFloat(data):aFloat;
+  }
+
+  @Override
+  public int getInt(String s) {
+    return getInteger(s, null);
+  }
+
+  @Override
+  public int getInt(String s, int i) {
+    return getInteger(s, i);
+  }
+
+  @Override
+  public Integer getInteger(String s, Integer integer) {
+    String data = context.getInstanceData(s);
+    return (data != null)?Integer.parseInt(data):integer;
+  }
+
+  @Override
+  public long getLong(String s) {
+    return getLong(s, null);
+  }
+
+  @Override
+  public long getLong(String s, long l) {
+    return getLong(s, (Long)l);
+  }
+
+  @Override
+  public Long getLong(String s, Long aLong) {
+    String data = context.getInstanceData(s);
+    return (data != null)?Long.parseLong(data):aLong;
+  }
+
+  @Override
+  public short getShort(String s) {
+    return getShort(s, null);
+  }
+
+  @Override
+  public short getShort(String s, short i) {
+    return getShort(s, (Short)i);
+  }
+
+  @Override
+  public Short getShort(String s, Short aShort) {
+    String data = context.getInstanceData(s);
+    return (data != null)?Short.parseShort(data):aShort;
+  }
+
+  @Override
+  public BigDecimal getBigDecimal(String s) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public BigDecimal getBigDecimal(String s, BigDecimal bigDecimal) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public BigInteger getBigInteger(String s) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public BigInteger getBigInteger(String s, BigInteger bigInteger) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public String getString(String s) {
+    return context.getInstanceData(s);
+  }
+
+  @Override
+  public String getString(String s, String s2) {
+    String data = getString(s);
+    return (data != null)?data:s2;
+  }
+
+  @Override
+  public String[] getStringArray(String s) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public List getList(String s) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public List getList(String s, List list) {
+    throw new UnsupportedOperationException();
+  }
+}

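The adapter above (marked @Deprecated in this commit) lets Apache Commons Configuration consumers read and write view instance data. A short sketch, with a hypothetical property name:

import org.apache.ambari.view.ViewContext;
import org.apache.ambari.view.hive20.persistence.utils.ContextConfigurationAdapter;
import org.apache.commons.configuration.Configuration;

public class ConfigurationAdapterSketch {
  public int readTimeout(ViewContext context) {
    Configuration config = new ContextConfigurationAdapter(context);
    config.setProperty("hive.session.timeout", "300");        // stored via putInstanceData
    return config.getInt("hive.session.timeout", 60);         // parsed back from instance data
  }
}
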
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/FilteringStrategy.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/FilteringStrategy.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/FilteringStrategy.java
new file mode 100644
index 0000000..e55d976
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/FilteringStrategy.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.persistence.utils;
+
+/**
+ * Filtering strategy for stored objects
+ */
+public interface FilteringStrategy {
+  /**
+   * Check whether the item conforms to the chosen filter
+   * @param item item to check
+   * @return true if item conforms this filter
+   */
+  boolean isConform(Indexed item);
+  String whereStatement();
+}

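OnlyOwnersFilteringStrategy further down in this commit is the shipped implementation. As an illustration only, a hypothetical strategy that filters SavedQuery beans by title prefix could look like the sketch below; whether a given storage backend accepts this whereStatement syntax is an assumption.

import org.apache.ambari.view.hive20.persistence.utils.FilteringStrategy;
import org.apache.ambari.view.hive20.persistence.utils.Indexed;
import org.apache.ambari.view.hive20.resources.savedQueries.SavedQuery;

public class TitlePrefixFilteringStrategy implements FilteringStrategy {
  private final String prefix;

  public TitlePrefixFilteringStrategy(String prefix) {
    this.prefix = prefix;
  }

  @Override
  public boolean isConform(Indexed item) {
    // programmatic check on an already loaded item
    return item instanceof SavedQuery
        && ((SavedQuery) item).getTitle() != null
        && ((SavedQuery) item).getTitle().startsWith(prefix);
  }

  @Override
  public String whereStatement() {
    // predicate for storages that can filter in the underlying store
    return "title LIKE '" + prefix + "%'";
  }
}
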
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/Indexed.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/Indexed.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/Indexed.java
new file mode 100644
index 0000000..71d2e55
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/Indexed.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.persistence.utils;
+
+/**
+ * Interface to represent item with identifier
+ */
+public interface Indexed {
+  /**
+   * Get the ID
+   * @return ID
+   */
+  String getId();
+
+  /**
+   * Set ID
+   * @param id ID
+   */
+  void setId(String id);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/ItemNotFound.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/ItemNotFound.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/ItemNotFound.java
new file mode 100644
index 0000000..cf69677
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/ItemNotFound.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.persistence.utils;
+
+/**
+ * Thrown when item was not found in DB
+ */
+public class ItemNotFound extends Exception {
+  public ItemNotFound() {
+  }
+
+  public ItemNotFound(String message) {
+    super(message);
+  }
+
+  public ItemNotFound(String message, Throwable cause) {
+    super(message, cause);
+  }
+
+  public ItemNotFound(Throwable cause) {
+    super(cause);
+  }
+
+  public ItemNotFound(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
+    super(message, cause, enableSuppression, writableStackTrace);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/OnlyOwnersFilteringStrategy.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/OnlyOwnersFilteringStrategy.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/OnlyOwnersFilteringStrategy.java
new file mode 100644
index 0000000..8f72031
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/OnlyOwnersFilteringStrategy.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.persistence.utils;
+
+public class OnlyOwnersFilteringStrategy implements FilteringStrategy {
+  private final String username;
+
+  public OnlyOwnersFilteringStrategy(String username) {
+    this.username = username;
+  }
+
+  @Override
+  public boolean isConform(Indexed item) {
+    Owned object = (Owned) item;
+    return object.getOwner().compareTo(username) == 0;
+  }
+
+  @Override
+  public String whereStatement() {
+    return "owner = '" + username + "'";
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/Owned.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/Owned.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/Owned.java
new file mode 100644
index 0000000..56793a5
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/Owned.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.persistence.utils;
+
+/**
+ * Interface to represent item with owner
+ */
+public interface Owned {
+  /**
+   * Get the owner
+   * @return owner
+   */
+  String getOwner();
+
+  /**
+   * Set owner
+   * @param owner owner
+   */
+  void setOwner(String owner);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/PersonalResource.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/PersonalResource.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/PersonalResource.java
new file mode 100644
index 0000000..6364b98
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/PersonalResource.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.persistence.utils;
+
+public interface PersonalResource extends Indexed, Owned {
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/StorageFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/StorageFactory.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/StorageFactory.java
new file mode 100644
index 0000000..1d3ea14
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/StorageFactory.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.persistence.utils;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.persistence.DataStoreStorage;
+import org.apache.ambari.view.hive20.persistence.IStorageFactory;
+import org.apache.ambari.view.hive20.persistence.LocalKeyValueStorage;
+import org.apache.ambari.view.hive20.persistence.Storage;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Storage factory, creates storage of Local or Persistence API type.
+ * Type depends on context configuration: if "dataworker.storagePath" is set,
+ * storage of Local type will be created.  Otherwise, Persistence API will be used.
+ *
+ * Storage is singleton.
+ */
+public class StorageFactory implements IStorageFactory {
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(StorageFactory.class);
+
+  private ViewContext context;
+
+  /**
+   * Constructor of storage factory
+   * @param context View Context instance
+   */
+  public StorageFactory(ViewContext context) {
+    this.context = context;
+  }
+
+  /**
+   * Creates storage instance
+   * @return storage instance
+   */
+  public Storage getStorage() {
+    String fileName = context.getProperties().get("dataworker.storagePath");
+
+    Storage storageInstance;
+    if (fileName != null) {
+      LOG.debug("Using local storage in " + fileName + " to store data");
+      // If specified, use LocalKeyValueStorage - key-value file based storage
+      storageInstance = new LocalKeyValueStorage(context);
+    } else {
+      LOG.debug("Using Persistence API to store data");
+      // If not specified, use ambari-views Persistence API
+      storageInstance = new DataStoreStorage(context);
+    }
+    return storageInstance;
+  }
+}

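Putting the factory together with the filtering strategy from earlier in this commit, a small sketch (not part of the diff) of listing only the current user's saved queries; which backend is used depends on whether "dataworker.storagePath" is set for the view instance:

import org.apache.ambari.view.ViewContext;
import org.apache.ambari.view.hive20.persistence.Storage;
import org.apache.ambari.view.hive20.persistence.utils.OnlyOwnersFilteringStrategy;
import org.apache.ambari.view.hive20.persistence.utils.StorageFactory;
import org.apache.ambari.view.hive20.resources.savedQueries.SavedQuery;

import java.util.List;

public class OwnedQueriesSketch {
  public List<SavedQuery> listMine(ViewContext context) {
    Storage storage = new StorageFactory(context).getStorage();
    return storage.loadAll(SavedQuery.class, new OnlyOwnersFilteringStrategy(context.getUsername()));
  }
}
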
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/CRUDResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/CRUDResourceManager.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/CRUDResourceManager.java
new file mode 100644
index 0000000..41a8ee5
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/CRUDResourceManager.java
@@ -0,0 +1,131 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources;
+
+import org.apache.ambari.view.hive20.persistence.IStorageFactory;
+import org.apache.ambari.view.hive20.persistence.Storage;
+import org.apache.ambari.view.hive20.persistence.utils.FilteringStrategy;
+import org.apache.ambari.view.hive20.persistence.utils.Indexed;
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
+
+import java.util.List;
+
+/**
+ * CRUD resource manager
+ * @param <T> Data type with ID
+ */
+abstract public class CRUDResourceManager<T extends Indexed> implements IResourceManager<T> {
+  //TODO: refactor: the generic parameter should take a factory for Indexed objects, not the objects themselves
+  private Storage storage = null;
+
+  protected final Class<? extends T> resourceClass;
+  protected IStorageFactory storageFactory;
+
+  /**
+   * Constructor
+   * @param resourceClass model class
+   */
+  public CRUDResourceManager(Class<? extends T> resourceClass, IStorageFactory storageFactory) {
+    this.resourceClass = resourceClass;
+    this.storageFactory = storageFactory;
+  }
+  // CRUD operations
+
+  /**
+   * Create operation
+   * @param object object
+   * @return model object
+   */
+  @Override
+  public T create(T object) {
+    object.setId(null);
+    return this.save(object);
+  }
+
+  /**
+   * Read operation
+   * @param id identifier
+   * @return model object
+   * @throws org.apache.ambari.view.hive20.persistence.utils.ItemNotFound
+   */
+  @Override
+  public T read(Object id) throws ItemNotFound {
+    T object = storageFactory.getStorage().load(this.resourceClass, id);
+    if (!checkPermissions(object)) {
+      throw new ItemNotFound();
+    }
+    return object;
+  }
+
+  /**
+   * Read all objects
+   * @param filteringStrategy filtering strategy
+   * @return list of filtered objects
+   */
+  @Override
+  public List<T> readAll(FilteringStrategy filteringStrategy) {
+    return storageFactory.getStorage().loadAll(this.resourceClass, filteringStrategy);
+  }
+
+  /**
+   * Update operation
+   * @param newObject new object
+   * @param id identifier of previous object
+   * @return model object
+   * @throws org.apache.ambari.view.hive20.persistence.utils.ItemNotFound
+   */
+  @Override
+  public T update(T newObject, String id) throws ItemNotFound {
+    newObject.setId(id);
+    this.save(newObject);
+    return newObject;
+  }
+
+  /**
+   * Delete operation
+   * @param resourceId object identifier
+   * @throws org.apache.ambari.view.hive20.persistence.utils.ItemNotFound
+   */
+  @Override
+  public void delete(Object resourceId) throws ItemNotFound {
+    if (!storageFactory.getStorage().exists(this.resourceClass, resourceId)) {
+      throw new ItemNotFound();
+    }
+    storageFactory.getStorage().delete(this.resourceClass, resourceId);
+  }
+
+  // UTILS
+
+  protected T save(T object) {
+    storageFactory.getStorage().store(resourceClass, object);
+    return object;
+  }
+
+  protected abstract boolean checkPermissions(T object);
+
+  protected void cleanupAfterErrorAndThrowAgain(Indexed object, ServiceFormattedException e) {
+    try {
+      delete(object.getId());
+    } catch (ItemNotFound itemNotFound) {
+      throw new ServiceFormattedException("E040 Item not found", itemNotFound);
+    }
+    throw e;
+  }
+}

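SharedCRUDResourceManager and PersonalCRUDResourceManager later in this commit are the real subclasses. As a sketch of the extension point only, a trivial subclass just has to supply the permission check:

import org.apache.ambari.view.hive20.persistence.IStorageFactory;
import org.apache.ambari.view.hive20.resources.CRUDResourceManager;
import org.apache.ambari.view.hive20.resources.savedQueries.SavedQuery;

public class AllowAllSavedQueryManager extends CRUDResourceManager<SavedQuery> {
  public AllowAllSavedQueryManager(IStorageFactory storageFactory) {
    super(SavedQuery.class, storageFactory);
  }

  @Override
  protected boolean checkPermissions(SavedQuery object) {
    return true; // no restriction; the shipped managers compare the owner with the current user
  }
}
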
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/IResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/IResourceManager.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/IResourceManager.java
new file mode 100644
index 0000000..2fd3c53
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/IResourceManager.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources;
+
+import org.apache.ambari.view.hive20.persistence.utils.FilteringStrategy;
+import org.apache.ambari.view.hive20.persistence.utils.Indexed;
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+
+import java.util.List;
+
+public interface IResourceManager<T extends Indexed> {
+  T create(T object);
+
+  T read(Object id) throws ItemNotFound;
+
+  List<T> readAll(FilteringStrategy filteringStrategy);
+
+  T update(T newObject, String id) throws ItemNotFound;
+
+  void delete(Object resourceId) throws ItemNotFound;
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/PersonalCRUDResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/PersonalCRUDResourceManager.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/PersonalCRUDResourceManager.java
new file mode 100644
index 0000000..8d2ab86
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/PersonalCRUDResourceManager.java
@@ -0,0 +1,99 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.persistence.IStorageFactory;
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive20.persistence.utils.PersonalResource;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.concurrent.Callable;
+
+/**
+ * Resource manager that returns only the current user's own elements from the DB
+ * @param <T> Data type with ID and Owner
+ */
+public class PersonalCRUDResourceManager<T extends PersonalResource> extends CRUDResourceManager<T> {
+  protected boolean ignorePermissions = false;
+
+  private final static Logger LOG =
+      LoggerFactory.getLogger(PersonalCRUDResourceManager.class);
+  protected ViewContext context;
+
+  /**
+   * Constructor
+   * @param resourceClass model class
+   */
+  public PersonalCRUDResourceManager(Class<? extends T> resourceClass, IStorageFactory storageFabric, ViewContext context) {
+    super(resourceClass, storageFabric);
+    this.context = context;
+  }
+
+  @Override
+  public T update(T newObject, String id) throws ItemNotFound {
+    T object = storageFactory.getStorage().load(this.resourceClass, id);
+    if (object.getOwner().compareTo(this.context.getUsername()) != 0) {
+      throw new ItemNotFound();
+    }
+
+    newObject.setOwner(this.context.getUsername());
+    return super.update(newObject, id);
+  }
+
+  @Override
+  public T save(T object) {
+    if (!ignorePermissions) {
+      // in threads permissions should be ignored,
+      // because context.getUsername doesn't work. See BUG-27093.
+      object.setOwner(this.context.getUsername());
+    }
+    return super.save(object);
+  }
+
+  @Override
+  protected boolean checkPermissions(T object) {
+    if (ignorePermissions) {
+      return true;
+    }
+    return object.getOwner().compareTo(this.context.getUsername()) == 0;
+  }
+
+  /**
+   * Execute action ignoring the object's owner
+   * @param actions callable to execute
+   * @return value returned from actions
+   * @throws Exception
+   */
+  public T ignorePermissions(Callable<T> actions) throws Exception {
+    ignorePermissions = true;
+    T result;
+    try {
+      result = actions.call();
+    } finally {
+      ignorePermissions = false;
+    }
+    return result;
+  }
+
+  protected String getUsername() {
+    return context.getUsername();
+  }
+}

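The ignorePermissions(Callable) helper above is meant for code paths (such as background threads) where context.getUsername() is unavailable. A hedged sketch of using it to read an object regardless of its owner; the manager instance and id are assumed:

import org.apache.ambari.view.hive20.resources.PersonalCRUDResourceManager;
import org.apache.ambari.view.hive20.resources.savedQueries.SavedQuery;

import java.util.concurrent.Callable;

public class IgnorePermissionsSketch {
  public SavedQuery readAsSystem(final PersonalCRUDResourceManager<SavedQuery> manager,
                                 final String id) throws Exception {
    // permission checks are suspended only while the callable runs
    return manager.ignorePermissions(new Callable<SavedQuery>() {
      @Override
      public SavedQuery call() throws Exception {
        return manager.read(id);
      }
    });
  }
}
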
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/SharedCRUDResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/SharedCRUDResourceManager.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/SharedCRUDResourceManager.java
new file mode 100644
index 0000000..ef48d9e
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/SharedCRUDResourceManager.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.persistence.IStorageFactory;
+import org.apache.ambari.view.hive20.persistence.utils.Indexed;
+
+/**
+ * Resource manager that doesn't restrict access (Allow all)
+ * @param <T> Data type with ID
+ */
+public class SharedCRUDResourceManager<T extends Indexed> extends CRUDResourceManager<T> {
+  protected ViewContext context;
+
+  /**
+   * Constructor
+   * @param responseClass model class
+   */
+  public SharedCRUDResourceManager(Class<T> responseClass, IStorageFactory storageFabric) {
+    super(responseClass, storageFabric);
+  }
+
+  @Override
+  protected boolean checkPermissions(T object) {
+    return true; //everyone has permission
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/ConnectionService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/ConnectionService.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/ConnectionService.java
new file mode 100644
index 0000000..30fda79
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/ConnectionService.java
@@ -0,0 +1,155 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.browser;
+
+import com.google.common.base.Optional;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.AuthParams;
+import org.apache.ambari.view.hive20.ConnectionFactory;
+import org.apache.ambari.view.hive20.ConnectionSystem;
+import org.apache.ambari.view.hive20.client.ConnectionConfig;
+import org.apache.ambari.view.hive20.internal.ConnectionException;
+import org.apache.ambari.view.hive20.internal.HiveConnectionWrapper;
+import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+/**
+ * Connection verification and management controller
+ */
+public class ConnectionService {
+
+  public static final String NO_PASSWORD = "";
+  public static final String SUFFIX = "validating the login";
+
+  @Inject
+  protected ViewContext context;
+
+  protected final static Logger LOG =
+    LoggerFactory.getLogger(ConnectionService.class);
+
+  /**
+   * Check if LDAP is configured on Hive.
+   * If no password is cached, ask for one (401).
+   * If a password is cached, try to connect; if the connection succeeds, return OK.
+   * If the connection fails, ask for the password again (401).
+   */
+  @GET
+  @Path("connect")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response attemptConnection() {
+    boolean ldapEnabled = ConnectionFactory.isLdapEnabled(context);
+    if (ldapEnabled) {
+      ConnectionSystem instance = ConnectionSystem.getInstance();
+      Optional<String> password = instance.getPassword(context);
+      if (!password.isPresent()) {
+        // No password cached - request for one
+        return Response.status(Response.Status.UNAUTHORIZED).build();
+      }
+      // if there was a password cached, make a connection attempt
+      // get the password
+      String pass = password.get();
+      // password may be stale, try to connect to Hive
+      return attemptHiveConnection(pass);
+    }
+    return attemptHiveConnection(NO_PASSWORD);
+  }
+
+
+  private Response getOKResponse() {
+    JSONObject response = new JSONObject();
+    response.put("message", "OK");
+    response.put("trace", null);
+    response.put("status", "200");
+    return Response.ok().entity(response).type(MediaType.APPLICATION_JSON).build();
+  }
+
+  private Response attemptHiveConnection(String pass) {
+    ConnectionConfig connectionConfig = ConnectionFactory.create(context);
+    HiveConnectionWrapper hiveConnectionWrapper = new HiveConnectionWrapper(connectionConfig.getJdbcUrl(), connectionConfig.getUsername(), pass, new AuthParams(context));
+    try {
+      hiveConnectionWrapper.connect();
+    } catch (ConnectionException e) {
+      // Cannot connect with the current credentials.
+      // Check the message to see if the cause was a login failure:
+      // if so, return a 401; otherwise return a 500.
+      if (isLoginError(e)) {
+        return Response.status(Response.Status.UNAUTHORIZED).build();
+      } else {
+        throw new ServiceFormattedException(e.getMessage(), e);
+      }
+    } finally {
+      try {
+        hiveConnectionWrapper.disconnect();
+      } catch (ConnectionException e) {
+        LOG.warn("Cannot close the connection");
+      }
+    }
+    return getOKResponse();
+  }
+
+  private boolean isLoginError(ConnectionException ce) {
+    return ce.getCause().getMessage().toLowerCase().endsWith(SUFFIX);
+  }
+
+
+  /**
+   * Set password
+   * This just updates the caches.
+   */
+  @POST
+  @Path("auth")
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response setupPassword(AuthRequest request) {
+    try {
+      // Cache the password for the user
+      ConnectionSystem instance = ConnectionSystem.getInstance();
+      instance.persistCredentials(context.getUsername(), request.password);
+      return getOKResponse();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  public static class AuthRequest {
+    public String password;
+  }
+}

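For reference (not part of the diff), the expected client flow against the endpoints above: GET .../connect returns 401 when LDAP is enabled and no usable password is cached; the client then POSTs a JSON body such as {"password": "<hive password>"} to .../auth, which caches the credential via ConnectionSystem.persistCredentials, and then retries the connect call.
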
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java
new file mode 100644
index 0000000..5a2d389
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java
@@ -0,0 +1,302 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.browser;
+
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.actor.Inbox;
+import com.google.common.base.Function;
+import com.google.common.base.Optional;
+import com.google.common.base.Predicate;
+import com.google.common.base.Strings;
+import com.google.common.collect.FluentIterable;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.ConnectionSystem;
+import org.apache.ambari.view.hive20.actor.DatabaseManager;
+import org.apache.ambari.view.hive20.client.ConnectionConfig;
+import org.apache.ambari.view.hive20.client.DDLDelegator;
+import org.apache.ambari.view.hive20.client.DDLDelegatorImpl;
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.exceptions.ServiceException;
+import org.apache.ambari.view.hive20.internal.dto.DatabaseInfo;
+import org.apache.ambari.view.hive20.internal.dto.DatabaseResponse;
+import org.apache.ambari.view.hive20.internal.dto.TableInfo;
+import org.apache.ambari.view.hive20.internal.dto.TableMeta;
+import org.apache.ambari.view.hive20.internal.dto.TableResponse;
+import org.apache.ambari.view.hive20.internal.parsers.TableMetaParserImpl;
+import org.apache.ambari.view.hive20.internal.query.generators.AlterTableQueryGenerator;
+import org.apache.ambari.view.hive20.internal.query.generators.CreateTableQueryGenerator;
+import org.apache.ambari.view.hive20.internal.query.generators.DeleteTableQueryGenerator;
+import org.apache.ambari.view.hive20.resources.jobs.JobServiceInternal;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobController;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobImpl;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobResourceManager;
+import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import scala.concurrent.duration.Duration;
+
+import javax.annotation.Nullable;
+import javax.inject.Inject;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Proxy that serves database and table metadata and generates
+ * create/alter/delete table DDL, submitting it as jobs.
+ */
+public class DDLProxy {
+  private static final Logger LOG = LoggerFactory.getLogger(DDLProxy.class);
+
+  private final ViewContext context;
+  private final TableMetaParserImpl tableMetaParser;
+
+  @Inject
+  public DDLProxy(ViewContext context, TableMetaParserImpl tableMetaParser) {
+    this.context = context;
+    this.tableMetaParser = tableMetaParser;
+    LOG.info("Creating DDLProxy");
+  }
+
+
+  public Set<DatabaseResponse> getDatabases() {
+    Set<DatabaseInfo> infos = getDatabaseInfos();
+    return transformToDatabasesResponse(infos);
+  }
+
+  public DatabaseResponse getDatabase(final String databaseId) {
+    Optional<DatabaseInfo> infoOptional = selectDatabase(databaseId);
+    if (!infoOptional.isPresent()) {
+      throw new ServiceFormattedException("Database " + databaseId + " not found", null);
+    }
+
+    return transformToDatabaseResponse(infoOptional.get());
+  }
+
+  public Set<TableResponse> getTables(final String databaseId) {
+    Optional<DatabaseInfo> infoOptional = selectDatabase(databaseId);
+    if (!infoOptional.isPresent()) {
+      throw new ServiceFormattedException("Database " + databaseId + " not found", null);
+    }
+    DatabaseInfo info = infoOptional.get();
+    return transformToTablesResponse(info.getTables(), info.getName());
+  }
+
+  public TableResponse getTable(final String databaseName, final String tableName) {
+    Optional<DatabaseInfo> databaseOptional = selectDatabase(databaseName);
+    if (!databaseOptional.isPresent()) {
+      throw new ServiceFormattedException("Database " + databaseName + " not found", null);
+    }
+    Optional<TableInfo> tableOptional = selectTable(databaseOptional.get().getTables(), tableName);
+    if (!tableOptional.isPresent()) {
+      throw new ServiceFormattedException("Table " + databaseName + "." + tableName + " not found", null);
+    }
+    return transformToTableResponse(tableOptional.get(), databaseName);
+  }
+
+  public TableMeta getTableProperties(ViewContext context, ConnectionConfig connectionConfig, String databaseName, String tableName) {
+    DDLDelegator delegator = new DDLDelegatorImpl(context, ConnectionSystem.getInstance().getActorSystem(), ConnectionSystem.getInstance().getOperationController(context));
+    List<Row> createTableStatementRows = delegator.getTableCreateStatement(connectionConfig, databaseName, tableName);
+    List<Row> describeFormattedRows = delegator.getTableDescriptionFormatted(connectionConfig, databaseName, tableName);
+
+    return tableMetaParser.parse(databaseName, tableName, createTableStatementRows, describeFormattedRows);
+  }
+
+  private Optional<DatabaseInfo> selectDatabase(final String databaseId) {
+    Set<DatabaseInfo> infos = getDatabaseInfos();
+    return FluentIterable.from(infos).filter(new Predicate<DatabaseInfo>() {
+      @Override
+      public boolean apply(@Nullable DatabaseInfo input) {
+        return input.getName().equalsIgnoreCase(databaseId);
+      }
+    }).first();
+  }
+
+  private Set<DatabaseResponse> transformToDatabasesResponse(Set<DatabaseInfo> infos) {
+    return FluentIterable.from(infos).transform(new Function<DatabaseInfo, DatabaseResponse>() {
+      @Nullable
+      @Override
+      public DatabaseResponse apply(@Nullable DatabaseInfo input) {
+        DatabaseResponse response = new DatabaseResponse();
+        response.setId(input.getName());
+        response.setName(input.getName());
+        return response;
+      }
+    }).toSet();
+  }
+
+  private DatabaseResponse transformToDatabaseResponse(DatabaseInfo databaseInfo) {
+    DatabaseResponse response = new DatabaseResponse();
+    response.setName(databaseInfo.getName());
+    response.setId(databaseInfo.getName());
+    Set<TableResponse> tableResponses = transformToTablesResponse(databaseInfo.getTables(), databaseInfo.getName());
+    response.addAllTables(tableResponses);
+    return response;
+  }
+
+  private Set<TableResponse> transformToTablesResponse(final Set<TableInfo> tables, final String databaseName) {
+    return FluentIterable.from(tables).transform(new Function<TableInfo, TableResponse>() {
+      @Nullable
+      @Override
+      public TableResponse apply(@Nullable TableInfo input) {
+        return transformToTableResponse(input, databaseName);
+      }
+    }).toSet();
+  }
+
+  private TableResponse transformToTableResponse(TableInfo tableInfo, String databaseName) {
+    TableResponse response = new TableResponse();
+    response.setId(databaseName + "/" + tableInfo.getName());
+    response.setName(tableInfo.getName());
+    response.setType(tableInfo.getType());
+    response.setDatabaseId(databaseName);
+    return response;
+  }
+
+  private Optional<TableInfo> selectTable(Set<TableInfo> tables, final String tableName) {
+    return FluentIterable.from(tables).filter(new Predicate<TableInfo>() {
+      @Override
+      public boolean apply(@Nullable TableInfo input) {
+        return input.getName().equalsIgnoreCase(tableName);
+      }
+    }).first();
+  }
+
+  private Set<DatabaseInfo> getDatabaseInfos() {
+    ActorRef metaDataManager = ConnectionSystem.getInstance().getMetaDataManager(context);
+    ActorSystem system = ConnectionSystem.getInstance().getActorSystem();
+
+    Inbox inbox = Inbox.create(system);
+
+    inbox.send(metaDataManager, new DatabaseManager.GetDatabases(context.getUsername()));
+    Object receive;
+    try {
+      receive = inbox.receive(Duration.create(60 * 1000, TimeUnit.MILLISECONDS));
+    } catch (Throwable ex) {
+      String errorMessage = "Query timed out to fetch databases information for user: " + context.getUsername();
+      LOG.error(errorMessage, ex);
+      throw new ServiceFormattedException(errorMessage, ex);
+    }
+    Set<DatabaseInfo> infos = new HashSet<>();
+
+    if (receive instanceof DatabaseManager.DatabasesResult) {
+      infos = ((DatabaseManager.DatabasesResult) receive).getDatabases();
+    }
+    return infos;
+  }
+
+  public String generateCreateTableDDL(String databaseName, TableMeta tableMeta) throws ServiceException {
+    if (Strings.isNullOrEmpty(tableMeta.getDatabase())) {
+      tableMeta.setDatabase(databaseName);
+    }
+    Optional<String> createTableQuery = new CreateTableQueryGenerator(tableMeta).getQuery();
+    if (createTableQuery.isPresent()) {
+      LOG.info("generated create table query : {}", createTableQuery);
+      return createTableQuery.get();
+    } else {
+      throw new ServiceException("could not generate create table query for database : " + databaseName + " table : " + tableMeta.getTable());
+    }
+  }
+
+  public Job createTable(String databaseName, TableMeta tableMeta, JobResourceManager resourceManager) throws ServiceException {
+    String createTableQuery = this.generateCreateTableDDL(databaseName, tableMeta);
+    Map jobInfo = new HashMap<>();
+    jobInfo.put("title", "Create table " + tableMeta.getDatabase() + "." + tableMeta.getTable());
+    jobInfo.put("forcedContent", createTableQuery);
+    jobInfo.put("dataBase", databaseName);
+
+    try {
+      Job job = new JobImpl(jobInfo);
+      JobController createdJobController = new JobServiceInternal().createJob(job, resourceManager);
+      Job returnableJob = createdJobController.getJobPOJO();
+      LOG.info("returning job with id {} for create table {}", returnableJob.getId(), tableMeta.getTable());
+      return returnableJob;
+    } catch (Throwable e) {
+      LOG.error("Exception occurred while creating the table for create Query : {}", createTableQuery, e);
+      throw new ServiceException(e);
+    }
+  }
+
+  public Job deleteTable(String databaseName, String tableName, JobResourceManager resourceManager) throws ServiceException {
+    String deleteTableQuery = generateDeleteTableDDL(databaseName, tableName);
+    Map jobInfo = new HashMap<>();
+    jobInfo.put("title", "Delete table " + databaseName + "." + tableName);
+    jobInfo.put("forcedContent", deleteTableQuery);
+    jobInfo.put("dataBase", databaseName);
+
+    try {
+      Job job = new JobImpl(jobInfo);
+      JobController createdJobController = new JobServiceInternal().createJob(job, resourceManager);
+      Job returnableJob = createdJobController.getJobPOJO();
+      LOG.info("returning job with id {} for the deletion of table : {}", returnableJob.getId(), tableName);
+      return returnableJob;
+    } catch (Throwable e) {
+      LOG.error("Exception occurred while deleting the table for delete Query : {}", deleteTableQuery, e);
+      throw new ServiceException(e);
+    }
+  }
+
+  public String generateDeleteTableDDL(String databaseName, String tableName) throws ServiceException {
+    Optional<String> deleteTableQuery = new DeleteTableQueryGenerator(databaseName, tableName).getQuery();
+    if (deleteTableQuery.isPresent()) {
+      LOG.info("deleting table {} with query {}", databaseName + "." + tableName, deleteTableQuery);
+      return deleteTableQuery.get();
+    } else {
+      throw new ServiceException("Failed to generate query for delete table " + databaseName + "." + tableName);
+    }
+  }
+
+  public Job alterTable(ViewContext context, ConnectionConfig hiveConnectionConfig, String databaseName, String oldTableName, TableMeta newTableMeta, JobResourceManager resourceManager) throws ServiceException {
+    String alterQuery = generateAlterTableQuery(context, hiveConnectionConfig, databaseName, oldTableName, newTableMeta);
+    Map jobInfo = new HashMap<>();
+    jobInfo.put("title", "Alter table " + databaseName + "." + oldTableName);
+    jobInfo.put("forcedContent", alterQuery);
+    jobInfo.put("dataBase", databaseName);
+
+    try {
+      Job job = new JobImpl(jobInfo);
+      JobController createdJobController = new JobServiceInternal().createJob(job, resourceManager);
+      Job returnableJob = createdJobController.getJobPOJO();
+      LOG.info("returning job with id {} for alter table {}", returnableJob.getId(), oldTableName);
+      return returnableJob;
+    } catch (Throwable e) {
+      LOG.error("Exception occurred while creating the table for create Query : {}", alterQuery, e);
+      throw new ServiceException(e);
+    }
+  }
+
+  public String generateAlterTableQuery(ViewContext context, ConnectionConfig hiveConnectionConfig, String databaseName, String oldTableName, TableMeta newTableMeta) throws ServiceException {
+    TableMeta oldTableMeta = this.getTableProperties(context, hiveConnectionConfig, databaseName, oldTableName);
+    return generateAlterTableQuery(oldTableMeta, newTableMeta);
+  }
+
+  public String generateAlterTableQuery(TableMeta oldTableMeta, TableMeta newTableMeta) throws ServiceException {
+    AlterTableQueryGenerator queryGenerator = new AlterTableQueryGenerator(oldTableMeta, newTableMeta);
+    Optional<String> alterQuery = queryGenerator.getQuery();
+    if (alterQuery.isPresent()) {
+      return alterQuery.get();
+    } else {
+      throw new ServiceException("Failed to generate alter table query for table " + oldTableMeta.getDatabase() + "." + oldTableMeta.getTable());
+    }
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLService.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLService.java
new file mode 100644
index 0000000..3d4e7d7
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLService.java
@@ -0,0 +1,222 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.browser;
+
+import org.apache.ambari.view.hive20.BaseService;
+import org.apache.ambari.view.hive20.client.ConnectionConfig;
+import org.apache.ambari.view.hive20.exceptions.ServiceException;
+import org.apache.ambari.view.hive20.internal.dto.DatabaseResponse;
+import org.apache.ambari.view.hive20.internal.dto.TableMeta;
+import org.apache.ambari.view.hive20.internal.dto.TableResponse;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobResourceManager;
+import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
+import org.apache.ambari.view.hive20.utils.SharedObjectsFactory;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.util.Set;
+
+/**
+ * Resource exposing DDL information and operations (create, alter, delete table) for databases
+ */
+public class DDLService extends BaseService {
+
+  private static final String CREATE_TABLE = "create-table";
+  private static final String ALTER_TABLE = "alter-table";
+  private final DDLProxy proxy;
+  private JobResourceManager resourceManager;
+
+  protected final static Logger LOG =
+    LoggerFactory.getLogger(DDLService.class);
+
+  protected synchronized JobResourceManager getResourceManager() {
+    if (resourceManager == null) {
+      SharedObjectsFactory connectionsFactory = getSharedObjectsFactory();
+      resourceManager = new JobResourceManager(connectionsFactory, context);
+    }
+    return resourceManager;
+  }
+
+  @Inject
+  public DDLService(DDLProxy proxy) {
+    this.proxy = proxy;
+  }
+
+
+  @GET
+  @Path("databases")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getDatabases(@QueryParam("like") String like) {
+    Set<DatabaseResponse> infos = proxy.getDatabases();
+    JSONObject response = new JSONObject();
+    response.put("databases", infos);
+    return Response.ok(response).build();
+  }
+
+  @GET
+  @Path("databases/{database_id}")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getDatabase(@PathParam("database_id") String databaseId) {
+    DatabaseResponse database = proxy.getDatabase(databaseId);
+    JSONObject response = new JSONObject();
+    response.put("database", database);
+    return Response.ok(response).build();
+  }
+
+
+  @GET
+  @Path("databases/{database_id}/tables")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getTables(@PathParam("database_id") String databaseName) {
+    Set<TableResponse> tables = proxy.getTables(databaseName);
+    JSONObject response = new JSONObject();
+    response.put("tables", tables);
+    return Response.ok(response).build();
+  }
+
+  @POST
+  @Path("databases/{database_id}/tables")
+  @Produces(MediaType.APPLICATION_JSON)
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response createTable(@PathParam("database_id") String databaseName, TableMetaRequest request) {
+    try {
+      Job job = proxy.createTable(databaseName, request.tableInfo, getResourceManager());
+      JSONObject response = new JSONObject();
+      response.put("job", job);
+      return Response.status(Response.Status.ACCEPTED).entity(response).build();
+    } catch (ServiceException e) {
+      LOG.error("Exception occurred while creating table for db {} with details : {}", databaseName, request.tableInfo, e);
+      throw new ServiceFormattedException(e);
+    }
+  }
+
+  @POST
+  @Path("databases/{database_id}/tables/ddl")
+  @Produces(MediaType.APPLICATION_JSON)
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response generateDDL(TableMetaRequest request, @QueryParam("query_type") String queryType) {
+    try {
+      String query = null;
+      if (CREATE_TABLE.equals(queryType)) {
+        query = proxy.generateCreateTableDDL(request.tableInfo.getDatabase(), request.tableInfo);
+      } else if (ALTER_TABLE.equals(queryType)) {
+        query = proxy.generateAlterTableQuery(context, getHiveConnectionConfig(), request.tableInfo.getDatabase(), request.tableInfo.getTable(), request.tableInfo);
+      } else {
+        throw new ServiceException("query_type = '" + queryType + "' is not supported");
+      }
+      JSONObject response = new JSONObject();
+      response.put("ddl", new DDL(query));
+      return Response.status(Response.Status.ACCEPTED).entity(response).build();
+    } catch (ServiceException e) {
+      LOG.error("Exception occurred while generating {} ddl for : {}", queryType, request.tableInfo, e);
+      throw new ServiceFormattedException(e);
+    }
+  }
+
+  @GET
+  @Path("databases/{database_id}/tables/{table_id}")
+  @Produces(MediaType.APPLICATION_JSON)
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response getTable(@PathParam("database_id") String databaseName, @PathParam("table_id") String tableName) {
+    TableResponse table = proxy.getTable(databaseName, tableName);
+    JSONObject response = new JSONObject();
+    response.put("table", table);
+    return Response.ok(response).build();
+  }
+
+  /**
+   * Alters an existing table.
+   * @param databaseName name of the database containing the table
+   * @param oldTableName required because the table name itself may have been changed in tableMeta
+   * @param tableMetaRequest the new table metadata
+   * @return response containing the job created for the alter operation
+   */
+  @PUT
+  @Path("databases/{database_id}/tables/{table_id}")
+  @Produces(MediaType.APPLICATION_JSON)
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response alterTable(@PathParam("database_id") String databaseName, @PathParam("table_id") String oldTableName, TableMetaRequest tableMetaRequest) {
+    try {
+      ConnectionConfig hiveConnectionConfig = getHiveConnectionConfig();
+      Job job = proxy.alterTable(context, hiveConnectionConfig, databaseName, oldTableName, tableMetaRequest.tableInfo, getResourceManager());
+      JSONObject response = new JSONObject();
+      response.put("job", job);
+      return Response.status(Response.Status.ACCEPTED).entity(response).build();
+    } catch (ServiceException e) {
+      LOG.error("Exception occurred while altering table for db {} with details : {}", databaseName, tableMetaRequest.tableInfo, e);
+      throw new ServiceFormattedException(e);
+    }
+  }
+
+  @DELETE
+  @Path("databases/{database_id}/tables/{table_id}")
+  @Produces(MediaType.APPLICATION_JSON)
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response deleteTable(@PathParam("database_id") String databaseName, @PathParam("table_id") String tableName) {
+    try {
+      Job job = proxy.deleteTable(databaseName, tableName, getResourceManager());
+      JSONObject response = new JSONObject();
+      response.put("job", job);
+      return Response.status(Response.Status.ACCEPTED).entity(response).build();
+    } catch (ServiceException e) {
+      LOG.error("Exception occurred while deleting table for db {}, tableName : {}", databaseName, tableName, e);
+      throw new ServiceFormattedException(e);
+    }
+  }
+
+  @GET
+  @Path("databases/{database_id}/tables/{table_id}/info")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getTableInfo(@PathParam("database_id") String databaseName, @PathParam("table_id") String tableName) {
+    ConnectionConfig hiveConnectionConfig = getHiveConnectionConfig();
+    TableMeta meta = proxy.getTableProperties(context, hiveConnectionConfig, databaseName, tableName);
+    JSONObject response = new JSONObject();
+    response.put("tableInfo", meta);
+    return Response.ok(response).build();
+  }
+
+  public static class DDL {
+    String query;
+
+    public DDL(String query) {
+      this.query = query;
+    }
+  }
+
+  /**
+   * Wrapper class for table meta request
+   */
+  public static class TableMetaRequest {
+    public TableMeta tableInfo;
+  }
+}
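
A rough sketch of how a client might drive the generateDDL endpoint above. Only the query_type values (create-table, alter-table), the tableInfo wrapper and the ddl/query response wrapper come from this resource; the API root path, the fetch-based client and the TableMeta fields shown are assumptions for illustration.

    // Hypothetical client call to POST databases/{db}/tables/ddl?query_type=create-table
    function fetchCreateTableDDL(apiRoot, tableMeta) {
      return fetch(`${apiRoot}/databases/${tableMeta.database}/tables/ddl?query_type=create-table`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ tableInfo: tableMeta })   // TableMetaRequest wraps the meta as "tableInfo"
      }).then((res) => res.json())
        .then((json) => json.ddl.query);                 // resource wraps the query as { ddl: { query: ... } };
                                                         // exact serialization depends on the JSON provider
    }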

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/FileService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/FileService.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/FileService.java
new file mode 100644
index 0000000..96e9554
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/FileService.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.browser;
+
+import org.apache.ambari.view.commons.hdfs.FileOperationService;
+import org.apache.ambari.view.hive20.BaseService;
+
+import javax.ws.rs.Path;
+
+/**
+ * Service exposing HDFS file operations for the view
+ */
+public class FileService extends BaseService {
+
+  @Path("/ops")
+  public FileOperationService fileOps() {
+    return new FileOperationService(context);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/HiveBrowserService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/HiveBrowserService.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/HiveBrowserService.java
new file mode 100644
index 0000000..274ea20
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/HiveBrowserService.java
@@ -0,0 +1,259 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.browser;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.ViewResourceHandler;
+import org.apache.ambari.view.hive20.BaseService;
+import org.apache.ambari.view.hive20.ConnectionSystem;
+import org.apache.ambari.view.hive20.client.ColumnDescription;
+import org.apache.ambari.view.hive20.client.ConnectionConfig;
+import org.apache.ambari.view.hive20.client.Cursor;
+import org.apache.ambari.view.hive20.client.DDLDelegator;
+import org.apache.ambari.view.hive20.client.DDLDelegatorImpl;
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.resources.jobs.ResultsPaginationController;
+import org.apache.ambari.view.hive20.utils.BadRequestFormattedException;
+import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.util.List;
+import java.util.concurrent.Callable;
+
+/**
+ * Database access resource
+ */
+public class HiveBrowserService extends BaseService {
+  @Inject
+  ViewResourceHandler handler;
+  @Inject
+  protected ViewContext context;
+
+  protected final static Logger LOG =
+    LoggerFactory.getLogger(HiveBrowserService.class);
+
+  /**
+   * Returns list of databases
+   */
+  @GET
+  @Path("database")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response databases(@QueryParam("like") String like,
+                            @QueryParam("first") String fromBeginning,
+                            @QueryParam("count") Integer count,
+                            @QueryParam("columns") final String requestedColumns) {
+    if (like == null)
+      like = "*";
+    else
+      like = "*" + like + "*";
+    JSONObject response = new JSONObject();
+    ConnectionConfig hiveConnectionConfig = getHiveConnectionConfig();
+    DDLDelegator delegator = new DDLDelegatorImpl(context, ConnectionSystem.getInstance().getActorSystem(), ConnectionSystem.getInstance().getOperationController(context));
+    List<String> databases = delegator.getDbList(hiveConnectionConfig, like);
+    response.put("databases", databases);
+
+    return Response.ok(response).build();
+
+  }
+
+  /**
+   * Returns paginated list of databases
+   */
+  @GET
+  @Path("database.page")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response databasesPaginated(@QueryParam("like") String like,
+                                     @QueryParam("first") String fromBeginning,
+                                     @QueryParam("count") Integer count,
+                                     @QueryParam("searchId") String searchId,
+                                     @QueryParam("format") String format,
+                                     @QueryParam("columns") final String requestedColumns) {
+    if (like == null)
+      like = "*";
+    else
+      like = "*" + like + "*";
+    String curl = null;
+    try {
+      final String finalLike = like;
+      final DDLDelegator delegator = new DDLDelegatorImpl(context, ConnectionSystem.getInstance().getActorSystem(), ConnectionSystem.getInstance().getOperationController(context));
+      return ResultsPaginationController.getInstance(context)
+          .request("databases", searchId, false, fromBeginning, count, format, requestedColumns,
+            new Callable<Cursor<Row, ColumnDescription>>() {
+              @Override
+              public Cursor<Row, ColumnDescription> call() throws Exception {
+                return delegator.getDbListCursor(getHiveConnectionConfig(), finalLike);
+              }
+            }).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (IllegalArgumentException ex) {
+      throw new BadRequestFormattedException(ex.getMessage(), ex);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex, curl);
+    }
+  }
+
+  /**
+   * Returns list of tables in the given database
+   */
+  @GET
+  @Path("database/{db}/table")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response tablesInDatabase(@PathParam("db") String db,
+                                   @QueryParam("like") String like,
+                                   @QueryParam("first") String fromBeginning,
+                                   @QueryParam("count") Integer count,
+                                   @QueryParam("columns") final String requestedColumns) {
+    if (like == null)
+      like = "*";
+    else
+      like = "*" + like + "*";
+
+    JSONObject response = new JSONObject();
+    DDLDelegator delegator = new DDLDelegatorImpl(context, ConnectionSystem.getInstance().getActorSystem(), ConnectionSystem.getInstance().getOperationController(context));
+    List<String> tables = delegator.getTableList(getHiveConnectionConfig(), db, like);
+    response.put("tables", tables);
+    response.put("database", db);
+    return Response.ok(response).build();
+
+  }
+
+  /**
+   * Returns paginated list of tables in the given database
+   */
+  @GET
+  @Path("database/{db}/table.page")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response tablesInDatabasePaginated(@PathParam("db") final String db,
+                                            @QueryParam("like") String like,
+                                            @QueryParam("first") String fromBeginning,
+                                            @QueryParam("count") Integer count,
+                                            @QueryParam("searchId") String searchId,
+                                            @QueryParam("format") String format,
+                                            @QueryParam("columns") final String requestedColumns) {
+    if (like == null)
+      like = "*";
+    else
+      like = "*" + like + "*";
+    String curl = null;
+    try {
+      final String finalLike = like;
+      final DDLDelegator delegator = new DDLDelegatorImpl(context, ConnectionSystem.getInstance().getActorSystem(), ConnectionSystem.getInstance().getOperationController(context));
+      try {
+        return ResultsPaginationController.getInstance(context)
+          .request(db + ":tables:", searchId, false, fromBeginning, count, format, requestedColumns,
+            new Callable<Cursor<Row, ColumnDescription>>() {
+              @Override
+              public Cursor<Row, ColumnDescription> call() throws Exception {
+                return delegator.getTableListCursor(getHiveConnectionConfig(), db, finalLike);
+              }
+            }).build();
+      } catch (Exception ex) {
+        throw new ServiceFormattedException(ex.getMessage(), ex);
+      }
+
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (IllegalArgumentException ex) {
+      throw new BadRequestFormattedException(ex.getMessage(), ex);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex, curl);
+    }
+  }
+
+  /**
+   * Returns description of the given table
+   */
+  @GET
+  @Path("database/{db}/table/{table}")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response describeTable(@PathParam("db") String db,
+                                @PathParam("table") String table,
+                                @QueryParam("like") String like,
+                                @QueryParam("columns") String requestedColumns,
+                                @QueryParam("extended") String extended) {
+    boolean extendedTableDescription = (extended != null && extended.equals("true"));
+    String curl = null;
+    try {
+      JSONObject response = new JSONObject();
+      DDLDelegator delegator = new DDLDelegatorImpl(context, ConnectionSystem.getInstance().getActorSystem(), ConnectionSystem.getInstance().getOperationController(context));
+      List<ColumnDescription> descriptions = delegator.getTableDescription(getHiveConnectionConfig(), db, table, "%", extendedTableDescription);
+      response.put("columns", descriptions);
+      response.put("database", db);
+      response.put("table", table);
+
+      //TODO: New implementation
+
+      return Response.ok(response).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (IllegalArgumentException ex) {
+      throw new BadRequestFormattedException(ex.getMessage(), ex);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex, curl);
+    }
+  }
+
+  /**
+   * Returns paginated description of the given table
+   */
+  @GET
+  @Path("database/{db}/table/{table}.page")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response describeTablePaginated(@PathParam("db") final String db,
+                                         @PathParam("table") final String table,
+                                         @QueryParam("like") String like,
+                                         @QueryParam("first") String fromBeginning,
+                                         @QueryParam("searchId") String searchId,
+                                         @QueryParam("count") Integer count,
+                                         @QueryParam("format") String format,
+                                         @QueryParam("columns") final String requestedColumns) {
+    if (like == null)
+      like = ".*";
+    else
+      like = ".*" + like + ".*";
+    final String finalLike = like;
+
+    final DDLDelegator delegator = new DDLDelegatorImpl(context, ConnectionSystem.getInstance().getActorSystem(), ConnectionSystem.getInstance().getOperationController(context));
+    try {
+      return ResultsPaginationController.getInstance(context)
+        .request(db + ":tables:" + table + ":columns", searchId, false, fromBeginning, count, format, requestedColumns,
+          new Callable<Cursor<Row, ColumnDescription>>() {
+            @Override
+            public Cursor<Row, ColumnDescription> call() throws Exception {
+              return delegator.getTableDescriptionCursor(getHiveConnectionConfig(), db, table, finalLike, false);
+            }
+          }).build();
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/files/FileResource.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/files/FileResource.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/files/FileResource.java
new file mode 100644
index 0000000..d8bf51f
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/files/FileResource.java
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.files;
+
+/**
+ * File bean
+ */
+public class FileResource {
+  private String filePath;
+  private String fileContent;
+  private boolean hasNext;
+  private long page;
+  private long pageCount;
+
+  public String getFilePath() {
+    return filePath;
+  }
+
+  public void setFilePath(String filePath) {
+    this.filePath = filePath;
+  }
+
+  public String getFileContent() {
+    return fileContent;
+  }
+
+  public void setFileContent(String fileContent) {
+    this.fileContent = fileContent;
+  }
+
+  public boolean isHasNext() {
+    return hasNext;
+  }
+
+  public void setHasNext(boolean hasNext) {
+    this.hasNext = hasNext;
+  }
+
+  public long getPage() {
+    return page;
+  }
+
+  public void setPage(long page) {
+    this.page = page;
+  }
+
+  public long getPageCount() {
+    return pageCount;
+  }
+
+  public void setPageCount(long pageCount) {
+    this.pageCount = pageCount;
+  }
+}
\ No newline at end of file


http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/helpers/to-json.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/helpers/to-json.js b/contrib/views/hive20/src/main/resources/ui/app/helpers/to-json.js
new file mode 100644
index 0000000..a5de322
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/helpers/to-json.js
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export function toJson([param, ...rest]/*, hash*/) {
+  return JSON.stringify(param, null, '  ');
+}
+
+export default Ember.Helper.helper(toJson);
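
Quick usage note (based on standard Ember helper conventions, not on anything else in this patch): the file name makes the helper available in templates as {{to-json someObject}}, and the named export can also be called directly, e.g.:

    // Positional params arrive as an array, mirroring how Ember invokes helpers.
    import { toJson } from 'ui/helpers/to-json';   // module prefix 'ui' is an assumption

    const pretty = toJson([{ database: 'default', table: 'users' }]);
    console.log(pretty);                           // JSON string indented with two spaces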

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/index.html
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/index.html b/contrib/views/hive20/src/main/resources/ui/app/index.html
new file mode 100644
index 0000000..cc9141f
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/index.html
@@ -0,0 +1,43 @@
+<!--
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+-->
+
+<!DOCTYPE html>
+<html>
+  <head>
+    <meta charset="utf-8">
+    <meta http-equiv="X-UA-Compatible" content="IE=edge">
+    <title>Ui</title>
+    <meta name="description" content="">
+    <meta name="viewport" content="width=device-width, initial-scale=1">
+
+    {{content-for "head"}}
+
+    <link rel="stylesheet" href="assets/vendor.css">
+    <link rel="stylesheet" href="assets/ui.css">
+
+    {{content-for "head-footer"}}
+  </head>
+  <body>
+    {{content-for "body"}}
+
+    <script src="assets/vendor.js"></script>
+    <script src="assets/ui.js"></script>
+
+    {{content-for "body-footer"}}
+  </body>
+</html>

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/initializers/.gitkeep
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/initializers/.gitkeep b/contrib/views/hive20/src/main/resources/ui/app/initializers/.gitkeep
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/initializers/responsive.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/initializers/responsive.js b/contrib/views/hive20/src/main/resources/ui/app/initializers/responsive.js
new file mode 100644
index 0000000..6e15f15
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/initializers/responsive.js
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { initialize } from 'ember-responsive/initializers/responsive';
+
+/**
+ * Ember responsive initializer
+ *
+ * Supports auto injecting media service app-wide.
+ *
+ * Generated by the ember-responsive addon. Customize initialize to change
+ * injection.
+ */
+
+export default {
+  name: 'responsive',
+  initialize
+};
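
Since the comment above only says that the media service is injected app-wide, here is a minimal sketch of a consumer, assuming the usual ember-responsive setup with a 'mobile' breakpoint defined in the app's breakpoints config:

    import Ember from 'ember';

    // Sketch only: reads the media service made available by the initializer above.
    export default Ember.Component.extend({
      media: Ember.inject.service(),                       // explicit injection also works
      isCompact: Ember.computed.reads('media.isMobile')    // 'isMobile' assumes a 'mobile' breakpoint
    });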

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/mixins/table-common.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/mixins/table-common.js b/contrib/views/hive20/src/main/resources/ui/app/mixins/table-common.js
new file mode 100644
index 0000000..4a2f517
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/mixins/table-common.js
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import Table from 'ember-light-table';
+
+const {
+  inject,
+  isEmpty
+  } = Ember;
+
+export default Ember.Mixin.create({
+  store: inject.service(),
+
+  page: 0,
+  limit: 10,
+  dir: 'asc',
+  sort: 'firstName',
+
+  isLoading: false,
+  canLoadMore: true,
+
+  model: null,
+  columns: null,
+  table: null,
+
+  init() {
+    this._super(...arguments);
+
+    let table = new Table(this.get('columns'), this.get('model'), { enableSync: true });
+    let sortColumn = table.get('allColumns').findBy('valuePath', this.get('sort'));
+
+    // Setup initial sort column
+    if (sortColumn) {
+      sortColumn.set('sorted', true);
+    }
+
+    this.set('table', table);
+  },
+
+  fetchRecords() {
+    this.set('isLoading', true);
+    this.get('store').query('user', this.getProperties(['page', 'limit', 'sort', 'dir'])).then((records) => {
+      this.get('model').pushObjects(records.toArray());
+      this.set('canLoadMore', !isEmpty(records));
+    }).finally(() => {
+      this.set('isLoading', false);
+    });
+  },
+
+  actions: {
+    onScrolledToBottom() {
+      if (this.get('canLoadMore')) {
+        this.incrementProperty('page');
+        this.fetchRecords();
+      }
+    },
+
+    onColumnClick(column) {
+      if (column.sorted) {
+        this.setProperties({
+          dir: column.ascending ? 'asc' : 'desc',
+          sort: column.get('valuePath'),
+          canLoadMore: true,
+          page: 0
+        });
+        this.get('model').clear();
+      }
+    }
+  }
+});
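
A minimal sketch of a component consuming this mixin; it only supplies the 'columns' and 'model' that the mixin's init() expects (the 'user' query and 'firstName' sort default come from the mixin as-is), everything else is illustrative:

    import Ember from 'ember';
    import TableCommon from '../mixins/table-common';

    export default Ember.Component.extend(TableCommon, {
      // Fresh array per instance; fetchRecords() pushes rows into it.
      model: Ember.computed(function() { return Ember.A([]); }),

      // ember-light-table column definitions; valuePath 'firstName' matches the mixin's default sort.
      columns: Ember.computed(function() {
        return [
          { label: 'First Name', valuePath: 'firstName' },
          { label: 'Last Name', valuePath: 'lastName' }
        ];
      })
    });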

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/models/.gitkeep
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/models/.gitkeep b/contrib/views/hive20/src/main/resources/ui/app/models/.gitkeep
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/models/alert.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/models/alert.js b/contrib/views/hive20/src/main/resources/ui/app/models/alert.js
new file mode 100644
index 0000000..f7d30a3
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/models/alert.js
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import DS from 'ember-data';
+
+export default DS.Model.extend({
+  type: DS.attr('string'),
+  message: DS.attr('string'),
+  responseMessage: DS.attr('string'),
+  status: DS.attr('number'),
+  trace: DS.attr('string'),
+  read: DS.attr('string', {defaultValue: false})
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/models/column.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/models/column.js b/contrib/views/hive20/src/main/resources/ui/app/models/column.js
new file mode 100644
index 0000000..b1fa99c
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/models/column.js
@@ -0,0 +1,107 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import datatypes from '../configs/datatypes';
+import Helper from '../configs/helpers';
+
+export default Ember.Object.extend({
+  name: '',
+  type: datatypes[0],
+  precision: null,
+  scale: null,
+  isPartitioned: false,
+  isClustered: false,
+  comment: '',
+
+
+  hasError: Ember.computed('errors.@each', function() { return this.get('errors.length') !== 0; }),
+  errors: [],
+
+  nameError: Ember.computed('errors.@each', function() {
+    return this.get('errors').findBy('type', 'name');
+  }),
+
+  typeError: Ember.computed('errors.@each', function() {
+    return this.get('errors').findBy('type', 'type');
+  }),
+
+
+  precisionError: Ember.computed('errors.@each', function() {
+    return this.get('errors').findBy('type', 'precision');
+  }),
+
+  scaleError: Ember.computed('errors.@each', function() {
+    return this.get('errors').findBy('type', 'scale');
+  }),
+
+  partitionObserver: Ember.observer('isPartitioned', function() {
+    if(this.get('isPartitioned')) {
+      this.set('isClustered', false);
+    }
+  }),
+
+  clusteredObserver: Ember.observer('isClustered', function() {
+    if(this.get('isClustered')) {
+      this.set('isPartitioned', false);
+    }
+  }),
+
+
+  // Control the UI
+  editing: false,
+
+  clearError() {
+    this.set('errors', []);
+  },
+
+
+  validate() {
+    this.clearError();
+    if (Ember.isEmpty(this.get('name'))) {
+      this.get('errors').pushObject({type: 'name', error: "name cannot be empty"});
+    }
+
+    if(Ember.isEmpty(this.get('type'))) {
+      this.get('errors').pushObject({type: 'type', error: "Type cannot be empty"});
+    }
+
+    if(this.get('type.hasPrecision')) {
+      if(Ember.isEmpty(this.get('precision'))) {
+        this.get('errors').pushObject({type: 'precision', error: "Precision cannot be empty"});
+      } else if(!Helper.isInteger(this.get('precision'))) {
+        this.get('errors').pushObject({type: 'precision', error: "Precision can only be a number"});
+      } else if(this.get('precision') <= 0) {
+        this.get('errors').pushObject({type: 'precision', error: "Precision can only be greater than zero"});
+      }
+
+    }
+
+    if(this.get('type.hasScale')) {
+      if(Ember.isEmpty(this.get('scale'))) {
+        this.get('errors').pushObject({type: 'scale', error: "Scale cannot be empty"});
+      } else if(!Helper.isInteger(this.get('scale'))) {
+        this.get('errors').pushObject({type: 'scale', error: "Scale can only be a number"});
+      } else if(this.get('scale') <= 0) {
+        this.get('errors').pushObject({type: 'scale', error: "Scale can only be greater than zero"});
+      }
+
+    }
+    return this.get('errors.length') === 0;
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/models/database.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/models/database.js b/contrib/views/hive20/src/main/resources/ui/app/models/database.js
new file mode 100644
index 0000000..b9e603a
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/models/database.js
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import DS from 'ember-data';
+
+export default DS.Model.extend({
+  name: DS.attr('string'),
+  tables: DS.hasMany('table'),
+  selected: DS.attr('boolean', {transient: true, defaultValue: false})
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/models/file.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/models/file.js b/contrib/views/hive20/src/main/resources/ui/app/models/file.js
new file mode 100644
index 0000000..c13d4e1
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/models/file.js
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import DS from 'ember-data';
+
+export default DS.Model.extend({
+  fileContent: DS.attr(),
+  hasNext: DS.attr(),
+  page: DS.attr('number'),
+  pageCount: DS.attr()
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/models/job.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/models/job.js b/contrib/views/hive20/src/main/resources/ui/app/models/job.js
new file mode 100644
index 0000000..76132d1
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/models/job.js
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import DS from 'ember-data';
+
+export default DS.Model.extend({
+  title: DS.attr('string'),
+  queryId: DS.attr(),
+  hiveQueryId: DS.attr('string'),
+  queryFile: DS.attr('string'),
+  owner: DS.attr('string'),
+  dataBase: DS.attr('string'),
+  duration: DS.attr(),
+  status: DS.attr('string'),
+  statusMessage: DS.attr('string'),
+  dateSubmitted: DS.attr('date'),
+  forcedContent: DS.attr('string'),
+  logFile: DS.attr('string'),
+  dagName:  DS.attr('string'),
+  dagId: DS.attr('string'),
+  sessionTag: DS.attr('string'),
+  page: DS.attr(),
+  statusDir: DS.attr('string'),
+  applicationId: DS.attr(),
+  referrer: DS.attr('string'),
+  confFile: DS.attr('string'),
+  globalSettings: DS.attr('string'),
+
+  dateSubmittedTimestamp: function () {
+    // dateSubmitted is already in milliseconds, so no seconds-to-milliseconds conversion is required.
+    return this.get('dateSubmitted');
+  }.property('dateSubmitted'),
+
+  uppercaseStatus: function () {
+    var status = this.get('status');
+
+    return status ? status.toUpperCase() : status;
+  }.property('status')
+
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/models/setting.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/models/setting.js b/contrib/views/hive20/src/main/resources/ui/app/models/setting.js
new file mode 100644
index 0000000..49d2c89
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/models/setting.js
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import DS from 'ember-data';
+
+export default DS.Model.extend({
+  key: DS.attr('string'),
+  value: DS.attr('string'),
+  editMode: false
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/models/table-info.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/models/table-info.js b/contrib/views/hive20/src/main/resources/ui/app/models/table-info.js
new file mode 100644
index 0000000..85306b6
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/models/table-info.js
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import DS from 'ember-data';
+
+export default DS.Model.extend({
+  database: DS.attr('string'),
+  table: DS.attr('string'),
+  columns: DS.attr(),
+  ddl: DS.attr('string'),
+  partitionInfo: DS.attr(),
+  detailedInfo: DS.attr(),
+  storageInfo: DS.attr(),
+  viewInfo: DS.attr()
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/models/table-property.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/models/table-property.js b/contrib/views/hive20/src/main/resources/ui/app/models/table-property.js
new file mode 100644
index 0000000..751f55c
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/models/table-property.js
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Object.extend({
+  key: '',
+  value: '',
+
+
+  hasError: Ember.computed('errors', function() { return this.get('errors.length') !== 0; }),
+  errors: [],
+
+  keyError: Ember.computed('errors.@each', function() {
+    return this.get('errors').findBy('type', 'key');
+  }),
+
+  valueError: Ember.computed('errors.@each', function() {
+    return this.get('errors').findBy('type', 'value');
+  }),
+
+
+  // Control the UI
+  editing: false,
+
+
+  validate() {
+    this.set('errors', []);
+    if (Ember.isEmpty(this.get('key'))) {
+      this.get('errors').pushObject({type: 'key', error: "Name cannot be empty"});
+    }
+
+    if(Ember.isEmpty(this.get('value'))) {
+      this.get('errors').pushObject({type: 'value', error: "Value cannot be empty"});
+    }
+
+    return this.get('errors.length') === 0;
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/models/table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/models/table.js b/contrib/views/hive20/src/main/resources/ui/app/models/table.js
new file mode 100644
index 0000000..3fdd21a
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/models/table.js
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import DS from 'ember-data';
+
+export default DS.Model.extend({
+  name: DS.attr('string'),
+  type: DS.attr('string'),
+  database: DS.belongsTo('database'),
+  selected: false,
+  icon: Ember.computed('type', function() {
+    if(this.get('type').toLowerCase() === 'view') {
+      return "eye";
+    } else {
+      return "table";
+    }
+  })
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/resolver.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/resolver.js b/contrib/views/hive20/src/main/resources/ui/app/resolver.js
new file mode 100644
index 0000000..b9eabe4
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/resolver.js
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Resolver from 'ember-resolver';
+
+export default Resolver;

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/router.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/router.js b/contrib/views/hive20/src/main/resources/ui/app/router.js
new file mode 100644
index 0000000..46150f4
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/router.js
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import config from './config/environment';
+
+const Router = Ember.Router.extend({
+  location: config.locationType,
+  rootURL: config.rootURL
+});
+
+Router.map(function() {
+  this.route('jobs');
+  this.route('udfs');
+  this.route('settings');
+  this.route('savedqueries');
+
+  this.route('databases', function() {
+    this.route('database', {path: '/:databaseId'}, function() {
+      this.route('tables', {path: '/tables'}, function() {
+        this.route('new');
+        this.route('table', {path: '/:name'}, function() {
+          this.route('columns');
+          this.route('partitions');
+          this.route('storage');
+          this.route('details');
+          this.route('view');
+          this.route('ddl');
+          this.route('stats');
+        })
+      });
+    });
+  });
+  this.route('messages', function() {
+    this.route('message', {path: '/:message_id'});
+  });
+
+  this.route('query', function() {
+
+  });
+
+});
+
+export default Router;
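
For orientation, the nested map above produces route names such as databases.database.tables.table.columns; a hedged example of navigating there from route code (the ids 'default' and 'my_table' are made-up placeholders):

    // illustrative only; the dynamic segments are :databaseId and :name from the map above
    this.transitionTo('databases.database.tables.table.columns', 'default', 'my_table');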

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/routes/.gitkeep
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/.gitkeep b/contrib/views/hive20/src/main/resources/ui/app/routes/.gitkeep
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/routes/application.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/application.js b/contrib/views/hive20/src/main/resources/ui/app/routes/application.js
new file mode 100644
index 0000000..93f5e61
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/application.js
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import tabs from '../configs/top-level-tabs';
+
+export default Ember.Route.extend({
+  keepAlive: Ember.inject.service('keep-alive'),
+  init: function() {
+    this._super(...arguments);
+    this.get('keepAlive').initialize();
+  },
+  setupController: function(controller, model) {
+    this._super(controller, model);
+    controller.set('tabs', tabs);
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/routes/databases.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases.js
new file mode 100644
index 0000000..6d25abd
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases.js
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Route.extend({
+
+  model() {
+    return this.store.findAll('database');
+  },
+
+  afterModel(model) {
+    if (model.get('length') > 0) {
+      this.selectDatabase(model);
+    }
+  },
+
+  setupController(controller, model) {
+    let sortedModel = model.sortBy('name');
+    let selectedModel = sortedModel.filterBy('selected', true).get('firstObject');
+    sortedModel.removeObject(selectedModel);
+    let finalList = [];
+    finalList.pushObject(selectedModel);
+    finalList.pushObjects(sortedModel);
+    controller.set('model', finalList);
+  },
+
+  selectDatabase(model) {
+    // check if default database is present
+    let toSelect = model.findBy('name', 'default');
+    if (Ember.isEmpty(toSelect)) {
+      let sortedModel = model.sortBy('name');
+      toSelect = sortedModel.get('firstObject');
+    }
+    toSelect.set('selected', true);
+  },
+
+  actions: {
+    databaseSelected(database) {
+      this.transitionTo('databases.database.tables', database.get('id'));
+    },
+
+    dropDatabase() {
+      let databases = this.get('controller.model');
+      let selectedModel = databases.filterBy('selected', true).get('firstObject');
+      if (Ember.isEmpty(selectedModel)) {
+        return;
+      }
+
+      this.get('controller').set('databaseName', selectedModel.get('name'));
+
+      if (selectedModel.get('tables.length') > 0) {
+        this.get('controller').set('databaseNotEmpty', true);
+        console.log('database not empty');
+        return;
+      }
+      this.get('controller').set('confirmDropDatabase', true);
+    },
+
+    createTable() {
+      console.log("Table created");
+    },
+
+    notEmptyDialogClosed() {
+      this.get('controller').set('databaseNotEmpty', false);
+      this.get('controller').set('databaseName', undefined);
+    },
+
+    databaseDropConfirmed() {
+      console.log('drop confirmed');
+      this.get('controller').set('confirmDropDatabase', false);
+      this.get('controller').set('databaseName', undefined);
+    },
+
+    databaseDropDeclined() {
+      console.log('drop declined');
+      this.get('controller').set('confirmDropDatabase', false);
+      this.get('controller').set('databaseName', undefined);
+    }
+  }
+});
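
A sketch of what setupController produces, using made-up database names to illustrate the ordering (the selected entry is moved to the front, the rest stay sorted by name):

    // model from the store:         [analytics, default, sales]
    // after selectDatabase:         'default' gets selected = true
    // controller model (finalList): [default, analytics, sales]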

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database.js
new file mode 100644
index 0000000..6a4b70b
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database.js
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Route.extend({
+  model(params) {
+    return this.store.find('database', params.databaseId);
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/index.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/index.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/index.js
new file mode 100644
index 0000000..58336df
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/index.js
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Route.extend({
+  beforeModel() {
+    let selectedDatabase = this.modelFor('databases.database');
+    this.transitionTo('databases.database.tables', selectedDatabase.get('name'));
+
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables.js
new file mode 100644
index 0000000..a121b88
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables.js
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Route.extend({
+  model() {
+    let selectedDatabase = this.modelFor('databases.database');
+    return this.store.query('table', {databaseId: selectedDatabase.get('name')});
+  },
+
+  afterModel(model) {
+    if (model.get('length') > 0) {
+      this.selectTable(model);
+    }
+  },
+  selectTable(model) {
+    let sortedModel = model.sortBy('name');
+    let toSelect = sortedModel.get('firstObject');
+    toSelect.set('selected', true);
+  },
+  actions: {
+    tableSelected(table) {
+      let tables = this.controllerFor('databases.database.tables').get('model');
+      tables.forEach((table) => {
+        table.set('selected', false);
+      });
+      table.set('selected', true);
+      this.transitionTo('databases.database.tables.table', table.get('name'));
+    }
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/index.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/index.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/index.js
new file mode 100644
index 0000000..e5943b1
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/index.js
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Route.extend({
+  beforeModel() {
+    let selectedTable = this.modelFor('databases.database.tables').filterBy('selected', true).get('firstObject');
+    if (!Ember.isEmpty(selectedTable)) {
+      this.transitionTo('databases.database.tables.table', selectedTable.get('name'));
+    }
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/new.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/new.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/new.js
new file mode 100644
index 0000000..f648ef6
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/new.js
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import tabs from '../../../../configs/create-table-tabs';
+
+export default Ember.Route.extend({
+  createTable: Ember.inject.service(),
+
+  setupController(controller, model) {
+    this._super(controller, model);
+    controller.set('tabs', Ember.copy(tabs));
+  },
+
+  actions: {
+    cancel() {
+      let databaseController = this.controllerFor('databases.database');
+      this.transitionTo('databases.database', databaseController.get('model'));
+    },
+
+    create(settings) {
+      // show the create-table progress modal while the request is submitted
+      this.controller.set('showCreateTableModal', true);
+      this.controller.set('createTableMessage', 'Submitting request to create table');
+      let databaseModel = this.controllerFor('databases.database').get('model');
+      this.get('createTable').submitCreateTable(databaseModel.get('name'), settings)
+        .then((job) => {
+          console.log('Created job: ', job.get('id'));
+          this.controller.set('createTableMessage', 'Waiting for the table to be created');
+          this.get('createTable').waitForJobToComplete(job.get('id'), 5 * 1000)
+            .then((status) => {
+              this.controller.set('createTableMessage', "Successfully created table");
+              Ember.run.later(() => {
+                this.controller.set('showCreateTableModal', false);
+                this.controller.set('createTableMessage');
+                this._transitionToCreatedTable(databaseModel.get('name'), settings.name);
+              }, 2 * 1000);
+            }, (error) => {
+              // TODO: handle error
+              Ember.run.later(() => {
+                this.controller.set('showCreateTableModal', false);
+                this.controller.set('createTableMessage');
+                this.transitionTo('databases.database', databaseModel.get('name'));
+              }, 2 * 1000);
+            });
+        }, (error) => {
+          console.log("Error encountered", error);
+          this.controller.set('showCreateTableModal', true);
+        });
+    }
+  },
+
+  _transitionToCreatedTable(database, table) {
+    this.transitionTo('databases.database.tables.table', database, table);
+  }
+});
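
A rough timeline of the create action above, assuming the create-table service resolves as it is used here:

    // submitCreateTable(db, settings)   -> resolves with a job record
    // waitForJobToComplete(jobId, 5000) -> polls the job every 5 seconds
    // on success: the message switches to "Successfully created table" and,
    //             after ~2 seconds, the route transitions to the new table
    // on failure: the modal is dismissed and the route falls back to the database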

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table.js
new file mode 100644
index 0000000..5a88bd2
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table.js
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import tabs from '../../../../configs/table-level-tabs';
+
+export default Ember.Route.extend({
+  model(params) {
+    let database = this.modelFor('databases.database').get('name');
+    let table = params.name;
+    return this.store.queryRecord('tableInfo', {databaseId: database, tableName: table});
+  },
+
+  setupController: function (controller, model) {
+    this._super(controller, model);
+    let newTabs = Ember.copy(tabs);
+    if (Ember.isEmpty(model.get('partitionInfo'))) {
+      newTabs = newTabs.rejectBy('name', 'partitions');
+    }
+
+    console.log(model.get('detailedInfo.tableType').toLowerCase());
+    if (model.get('detailedInfo.tableType').toLowerCase().indexOf('view') === -1) {
+      newTabs = newTabs.rejectBy('name', 'viewInfo');
+    }
+    controller.set('tabs', newTabs);
+  },
+
+  actions: {}
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/columns.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/columns.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/columns.js
new file mode 100644
index 0000000..a11a4de
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/columns.js
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import TableMetaRouter from './table-meta-router';
+
+export default TableMetaRouter.extend({
+
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/ddl.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/ddl.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/ddl.js
new file mode 100644
index 0000000..614eeec
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/ddl.js
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import TableMetaRouter from './table-meta-router';
+
+export default TableMetaRouter.extend({
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/details.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/details.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/details.js
new file mode 100644
index 0000000..614eeec
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/details.js
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import TableMetaRouter from './table-meta-router';
+
+export default TableMetaRouter.extend({
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/index.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/index.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/index.js
new file mode 100644
index 0000000..b039a79
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/index.js
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Route.extend({
+  beforeModel() {
+    this.transitionTo('databases.database.tables.table.columns');
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/partitions.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/partitions.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/partitions.js
new file mode 100644
index 0000000..614eeec
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/partitions.js
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import TableMetaRouter from './table-meta-router';
+
+export default TableMetaRouter.extend({
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/stats.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/stats.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/stats.js
new file mode 100644
index 0000000..614eeec
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/stats.js
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import TableMetaRouter from './table-meta-router';
+
+export default TableMetaRouter.extend({
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/storage.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/storage.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/storage.js
new file mode 100644
index 0000000..614eeec
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/storage.js
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import TableMetaRouter from './table-meta-router';
+
+export default TableMetaRouter.extend({
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/table-meta-router.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/table-meta-router.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/table-meta-router.js
new file mode 100644
index 0000000..06b91ae
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/table-meta-router.js
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Route.extend({
+  setupController: function(controller, model) {
+    this._super(controller, model);
+    let table = this.controllerFor('databases.database.tables.table').get('model');
+    controller.set('table', table);
+  },
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/view.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/view.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/view.js
new file mode 100644
index 0000000..614eeec
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/table/view.js
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import TableMetaRouter from './table-meta-router';
+
+export default TableMetaRouter.extend({
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/routes/databases/index.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/index.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/index.js
new file mode 100644
index 0000000..493ff10
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/index.js
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Route.extend({
+  beforeModel() {
+    let selectedDatabase = this.modelFor('databases').filterBy('selected', true).get('firstObject');
+    this.transitionTo('databases.database', selectedDatabase.get('id'));
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/routes/jobs.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/jobs.js b/contrib/views/hive20/src/main/resources/ui/app/routes/jobs.js
new file mode 100644
index 0000000..a2ebc25
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/jobs.js
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Route.extend({
+  moment: Ember.inject.service(),
+  timeInitializedTo: null,
+  queryParams: {
+    startTime: {
+      refreshModel: true
+    },
+    endTime: {
+      refreshModel: true
+    }
+  },
+
+
+  model(params) {
+    let now = this.get('moment').moment();
+    if(Ember.isEmpty(params.startTime) || Ember.isEmpty(params.endTime)) {
+      let initialValue = now.clone();
+      params.endTime = now.valueOf();
+      params.startTime = now.subtract('7', 'days').valueOf();
+      this.set('timeInitializedTo', initialValue);
+    }
+
+    return this.store.query('job', params);
+  },
+
+  setupController(controller, model) {
+    if(!Ember.isEmpty(this.get('timeInitializedTo'))) {
+
+      controller.set('endTime', this.get('timeInitializedTo').valueOf());
+      controller.set('startTime', this.get('timeInitializedTo').subtract('7', 'days').valueOf());
+      //unset timeInitializedTo
+      this.set('timeInitializedTo');
+    }
+
+    this._super(...arguments);
+
+  },
+
+  actions: {
+    dateFilterChanged(startTime, endTime) {
+      this.controller.set('startTime', this.get('moment').moment(startTime, 'YYYY-MM-DD').valueOf());
+      this.controller.set('endTime', this.get('moment').moment(endTime, 'YYYY-MM-DD').valueOf());
+      this.refresh();
+    }
+  }
+
+
+
+});
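
When no startTime/endTime query params are supplied, the model hook defaults the window to the last seven days before querying the store; roughly the equivalent query (timestamps are epoch milliseconds, shown symbolically):

    // illustrative only
    this.store.query('job', { startTime: now - 7 * 24 * 60 * 60 * 1000, endTime: now });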

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/routes/messages.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/messages.js b/contrib/views/hive20/src/main/resources/ui/app/routes/messages.js
new file mode 100644
index 0000000..1e413ed
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/messages.js
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Route.extend({
+  logger: Ember.inject.service('alert-messages'),
+  model: function() {
+    return this.store.peekAll('alert');
+  },
+  setupController: function(controller, model) {
+    this._super(controller, model);
+    this.get('logger').clearMessages();
+    this.get('logger').setUnreadMessagesToRead();
+    controller.set('isExpanded', true);
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/routes/messages/message.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/messages/message.js b/contrib/views/hive20/src/main/resources/ui/app/routes/messages/message.js
new file mode 100644
index 0000000..84be53b
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/messages/message.js
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Route.extend({
+  model: function(params) {
+    return this.store.peekRecord('alert', params.message_id);
+  },
+
+  setupController: function(controller, model) {
+    this._super(controller, model);
+    var messagesController = this.controllerFor('messages');
+    messagesController.set('isExpanded', false);
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/routes/query.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/query.js b/contrib/views/hive20/src/main/resources/ui/app/routes/query.js
new file mode 100644
index 0000000..699b64c
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/query.js
@@ -0,0 +1,280 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Route.extend({
+
+  model() {
+    return this.store.findAll('database');
+  },
+
+  query: Ember.inject.service(),
+
+  afterModel(model) {
+    if (model.get('length') > 0) {
+      this.selectDatabase(model);
+    }
+  },
+
+  setupController(controller, model) {
+
+    let sortedModel = model.sortBy('name');
+    let selectedModel = sortedModel.filterBy('selected', true).get('firstObject');
+    sortedModel.removeObject(selectedModel);
+    let finalList = [];
+    let selectedDB = [];
+
+    finalList.pushObject(selectedModel);
+    finalList.pushObjects(sortedModel);
+    controller.set('model', finalList);
+
+    selectedDB.pushObject(selectedModel);
+    controller.set('selectedModel',selectedDB);
+
+    let selectedDBName = selectedModel.get('name');
+
+    let self = this;
+    let selectedTablesModels = [];
+
+    selectedTablesModels.pushObject(
+      {
+        'dbname': selectedDBName,
+        'tables': this.store.query('table', {databaseId: selectedDBName})
+      }
+    );
+
+    controller.set('selectedTablesModels',selectedTablesModels );
+    controller.set('currentQuery', '');
+    controller.set('isQueryRunning', false);
+    controller.set('defaultQueryResult', {'schema' :[], 'rows' :[]});
+    controller.set('queryResult', controller.get('defaultQueryResult'));
+    controller.set('showPreviousButton', false);
+
+    //For Pagination
+    localStorage.setItem("jobData", JSON.stringify([]));
+    controller.set('prevPage', -1 );
+    controller.set('currentPage', 0 );
+    controller.set('nextPage', 1 );
+    controller.set('currentJobData', null );
+
+  },
+
+  selectDatabase(model) {
+    // check if default database is present
+    let toSelect = model.findBy('name', 'default');
+    if (Ember.isEmpty(toSelect)) {
+      let sortedModel = model.sortBy('name');
+      toSelect = sortedModel.get('firstObject');
+    }
+    toSelect.set('selected', true);
+  },
+
+  actions: {
+
+    xyz(selectedDBs){
+
+      let self = this;
+      let selectedTablesModels =[];
+
+      selectedDBs.forEach(function(db){
+        selectedTablesModels.pushObject(
+          {
+            'dbname': db ,
+            'tables':self.store.query('table', {databaseId: db})
+          }
+        )
+      });
+
+      this.get('controller').set('selectedTablesModels', selectedTablesModels );
+
+    },
+
+    tableSelected(){
+      console.log('I am in tableSelected');
+    },
+
+    showTables(db){
+      // should we do this by writing a separate component?
+      $('.collapse').hide();
+      $('#' + db).toggle();
+    },
+
+    notEmptyDialogClosed() {
+      this.get('controller').set('databaseNotEmpty', false);
+      this.get('controller').set('databaseName', undefined);
+    },
+
+    executeQuery(isFirstCall){
+
+      let self = this;
+      let queryInput = this.get('controller').get('currentQuery');
+      let dbid = this.get('controller').get('selectedTablesModels')[0]['dbname'];
+
+      self.get('controller').set('isQueryRunning', true);
+      self.get('controller').set('queryResult', self.get('controller').get('defaultQueryResult'));
+
+      let payload ={
+        "title":"Worksheet",
+        "hiveQueryId":null,
+        "queryFile":null,
+        "owner":null,
+        "dataBase":dbid,
+        "status":null,
+        "statusMessage":null,
+        "dateSubmitted":null,
+        "forcedContent":queryInput,
+        "logFile":null,
+        "dagName":null,
+        "dagId":null,
+        "sessionTag":null,
+        "statusDir":null,
+        "referrer":"job",
+        "confFile":null,
+        "globalSettings":""};
+
+
+      this.get('query').createJob(payload).then(function(data) {
+        // wait briefly before polling; an immediate poll usually returns status 409, a case that is also handled in getJob below.
+        setTimeout(function(){
+          self.get('controller').set('currentJobData', data);
+          self.send('getJob', data);
+        }, 2000);
+      }, function(reason) {
+        console.log(reason);
+      });
+
+    },
+    getJob(data){
+
+      var self = this;
+      // 'data' is the job-creation response handed over from executeQuery
+
+      let jobId = data.job.id;
+      let dateSubmitted = data.job.dateSubmitted;
+
+      this.get('query').getJob(jobId, dateSubmitted, true).then(function(data) {
+        // on fulfillment
+        console.log('getJob route', data );
+        self.get('controller').set('queryResult', data);
+        self.get('controller').set('isQueryRunning', false);
+
+        let localArr = JSON.parse(localStorage.getItem("jobData"));
+        localArr.push(data);
+        localStorage.setItem("jobData", JSON.stringify(localArr));
+
+        self.get('controller').set('currentPage', localArr.length);
+        self.get('controller').set('prevPage', localArr.length-1);
+
+
+      }, function(reason) {
+        // on rejection
+        console.log('reason' , reason);
+
+        if( reason.errors[0].status == 409 ){
+          setTimeout(function(){
+            self.send('getJob',data);
+          }, 2000);
+        }
+      });
+
+    },
+    goNextPage(){
+
+      let nextPage = this.get('controller').get('nextPage');
+      let totalPages = JSON.parse(localStorage.getItem("jobData")).length;
+
+      if(nextPage >= totalPages){
+        var self = this;
+        var data = this.get('controller').get('currentJobData');
+
+        let jobId = data.job.id;
+        let dateSubmitted = data.job.dateSubmitted;
+
+
+        this.get('query').getJob(jobId, dateSubmitted, false).then(function(data) {
+          // on fulfillment
+          console.log('getJob route', data );
+          self.get('controller').set('queryResult', data);
+          self.get('controller').set('isQueryRunning', false);
+          self.get('controller').set('showPreviousButton', true);
+
+          let localArr = JSON.parse(localStorage.getItem("jobData"));
+          localArr.push(data);
+
+
+          localStorage.setItem("jobData", JSON.stringify(localArr));
+
+
+          self.get('controller').set('currentPage', localArr.length);
+          self.get('controller').set('prevPage', localArr.length-1);
+
+          self.get('controller').set('nextPage', localArr.length+1);
+
+        }, function(reason) {
+          // on rejection
+          console.log('reason' , reason);
+
+          if( reason.errors[0].status == 409 ){
+            setTimeout(function(){
+              self.send('getJob',data);
+            }, 2000);
+          }
+        });
+      } else {
+
+        let currentPage = this.get('controller').get('currentPage');
+        let prevPage = this.get('controller').get('prevPage');
+        let nextPage = this.get('controller').get('nextPage');
+        let totalPages = JSON.parse(localStorage.getItem("jobData")).length;
+
+        if(nextPage < totalPages ){
+          this.get('controller').set('currentPage', currentPage+1 );
+          this.get('controller').set('prevPage', prevPage + 1 );
+          this.get('controller').set('nextPage', nextPage + 1);
+
+          this.get('controller').set('showPreviousButton', true);
+
+          this.get('controller').set('queryResult', JSON.parse(localStorage.getItem("jobData"))[this.get('controller').get('currentPage')] );
+        } else {
+
+          console.log('upper limit exceed');
+          this.send('goNextPage');
+        }
+      }
+    },
+    goPrevPage(){
+
+      let currentPage = this.get('controller').get('currentPage');
+      let prevPage = this.get('controller').get('prevPage');
+      let nextPage = this.get('controller').get('nextPage');
+      let totalPages = JSON.parse(localStorage.getItem("jobData")).length;
+
+      if(prevPage > -1){
+        this.get('controller').set('currentPage', currentPage-1 );
+        this.get('controller').set('prevPage', prevPage - 1 );
+        this.get('controller').set('nextPage', this.get('controller').get('currentPage')+1);
+
+        this.get('controller').set('queryResult', JSON.parse(localStorage.getItem("jobData"))[this.get('controller').get('currentPage')] );
+      } else {
+        //console.log('previous limit over');
+        this.get('controller').set('showPreviousButton', false);
+      }
+    }
+  }
+});
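
A sketch of the paging state this route keeps, derived from the actions above:

    // localStorage 'jobData'         : array of result pages fetched so far
    // currentPage/prevPage/nextPage  : counters into that array
    // goNextPage : fetches a fresh page only when nextPage >= jobData.length,
    //              otherwise it re-reads a cached page from localStorage
    // goPrevPage : always serves the previous page from the cache and hides
    //              the "previous" button once the start is reached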

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/routes/savedqueries.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/savedqueries.js b/contrib/views/hive20/src/main/resources/ui/app/routes/savedqueries.js
new file mode 100644
index 0000000..8719170
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/savedqueries.js
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Route.extend({
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/routes/settings.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/settings.js b/contrib/views/hive20/src/main/resources/ui/app/routes/settings.js
new file mode 100644
index 0000000..1ce5116
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/settings.js
@@ -0,0 +1,93 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import hiveParams from '../configs/hive-parameters';
+
+export default Ember.Route.extend({
+  model() {
+    return this.store.findAll('setting').then(settings => settings.toArray());
+  },
+  setupController(controller, model) {
+    this._super(...arguments);
+    const appendedHiveParams = this.prepareExhaustiveParameters(hiveParams, model);
+    controller.set('hiveParameters', appendedHiveParams);
+  },
+
+  prepareExhaustiveParameters(hiveParams, model) {
+    let newHiveParams = [];
+    newHiveParams.pushObjects(hiveParams);
+    model.forEach(x => {
+      let param = hiveParams.findBy('name', x.get('key'));
+
+      if(Ember.isEmpty(param)) {
+        newHiveParams.pushObject(
+          Ember.Object.create({name: x.get('key'), disabled: true})
+        );
+      } else {
+        param.set('disabled', true);
+      }
+    });
+    return newHiveParams;
+  },
+
+  actions: {
+    addNewSettings() {
+      let model = this.get('controller.model');
+      model.forEach(x => x.rollbackAttributes());
+      let newItem = this.store.createRecord('setting', {editMode: true});
+      model.pushObject(newItem);
+    },
+
+    editAction(setting) {
+      setting.set('editMode', true);
+    },
+
+    deleteAction(setting) {
+      return setting.destroyRecord().then(data => {
+        let model = this.get('controller.model');
+        model.removeObject(data);
+      }, err => {
+        console.log('error in deletion', err);
+      })
+    },
+
+    updateAction(newSetting) {
+      newSetting.save().then(data => {
+        console.log('saved', data);
+        data.set('editMode', false);
+      }, error => {
+        console.log('error', error);
+      })
+    },
+
+    cancelAction(newSetting) {
+      if (newSetting.get('isNew')) {
+        let model = this.get('controller.model');
+        model.removeObject(newSetting);
+      } else {
+        newSetting.set('editMode', false);
+      }
+    },
+
+    willTransition(transition) {
+      let unsavedModels = this.get('controller.model').filterBy('isNew', true);
+      unsavedModels.forEach(x => this.store.unloadRecord(x));
+    }
+  }
+});
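
An illustrative before/after for prepareExhaustiveParameters (the parameter names here are examples only):

    // hiveParams (predefined): [{name: 'hive.execution.engine'}, {name: 'hive.tez.container.size'}]
    // model (saved settings):  [{key: 'hive.execution.engine'}, {key: 'some.custom.key'}]
    // result: the predefined list with 'hive.execution.engine' marked disabled,
    //         plus an appended Ember.Object {name: 'some.custom.key', disabled: true}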

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/routes/udfs.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/udfs.js b/contrib/views/hive20/src/main/resources/ui/app/routes/udfs.js
new file mode 100644
index 0000000..8719170
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/udfs.js
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Route.extend({
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/serializers/file.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/serializers/file.js b/contrib/views/hive20/src/main/resources/ui/app/serializers/file.js
new file mode 100644
index 0000000..416db0c
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/serializers/file.js
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import DS from 'ember-data';
+
+export default DS.RESTSerializer.extend({
+  primaryKey: 'filePath'
+});


[13/20] ambari git commit: AMBARI-19321 : Hive View 2.0 - Minimal view for Hive which includes new UI changes. Also made changes in poms as required (nitirajrathore)

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/files/FileService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/files/FileService.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/files/FileService.java
new file mode 100644
index 0000000..a3623e9
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/files/FileService.java
@@ -0,0 +1,266 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.files;
+
+import com.jayway.jsonpath.JsonPath;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.ViewResourceHandler;
+import org.apache.ambari.view.commons.hdfs.UserService;
+import org.apache.ambari.view.hive20.BaseService;
+import org.apache.ambari.view.hive20.utils.*;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.ambari.view.utils.hdfs.HdfsUtil;
+import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileAlreadyExistsException;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.*;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URL;
+import java.util.HashMap;
+
+/**
+ * File access resource
+ * API:
+ * GET /:path
+ *      read entire file
+ * POST /
+ *      create new file
+ *      Required: filePath
+ *      file should not already exist
+ * PUT /:path
+ *      update file content
+ */
+public class FileService extends BaseService {
+  public static final String FAKE_FILE = "fakefile://";
+  public static final String JSON_PATH_FILE = "jsonpath:";
+
+  @Inject
+  ViewResourceHandler handler;
+
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(FileService.class);
+
+  /**
+   * Get single item
+   */
+  @GET
+  @Path("{filePath:.*}")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getFilePage(@PathParam("filePath") String filePath, @QueryParam("page") Long page) throws IOException, InterruptedException {
+
+    LOG.debug("Reading file " + filePath);
+    try {
+      FileResource file = new FileResource();
+
+      if (page == null)
+        page = 0L;
+
+      if (filePath.startsWith(FAKE_FILE)) {
+        if (page > 1)
+          throw new IllegalArgumentException("There's only one page in fake files");
+
+        String encodedContent = filePath.substring(FAKE_FILE.length());
+        String content = new String(Base64.decodeBase64(encodedContent));
+
+        fillFakeFileObject(filePath, file, content);
+      } else if (filePath.startsWith(JSON_PATH_FILE)) {
+        if (page > 1)
+          throw new IllegalArgumentException("There's only one page in jsonpath files");
+
+        String content = getJsonPathContentByUrl(filePath);
+        fillFakeFileObject(filePath, file, content);
+      } else  {
+
+        filePath = sanitizeFilePath(filePath);
+        FilePaginator paginator = new FilePaginator(filePath, getSharedObjectsFactory().getHdfsApi());
+
+        fillRealFileObject(filePath, page, file, paginator);
+      }
+
+      JSONObject object = new JSONObject();
+      object.put("file", file);
+      return Response.ok(object).status(200).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (FileNotFoundException ex) {
+      throw new NotFoundFormattedException(ex.getMessage(), ex);
+    } catch (IllegalArgumentException ex) {
+      throw new BadRequestFormattedException(ex.getMessage(), ex);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  protected String getJsonPathContentByUrl(String filePath) throws IOException {
+    URL url = new URL(filePath.substring(JSON_PATH_FILE.length()));
+
+    InputStream responseInputStream = context.getURLStreamProvider().readFrom(url.toString(), "GET",
+        (String)null, new HashMap<String, String>());
+    String response = IOUtils.toString(responseInputStream);
+
+    for (String ref : url.getRef().split("!")) {
+      response = JsonPath.read(response, ref);
+    }
+    return response;
+  }
+
+  public void fillRealFileObject(String filePath, Long page, FileResource file, FilePaginator paginator) throws IOException, InterruptedException {
+    file.setFilePath(filePath);
+    file.setFileContent(paginator.readPage(page));
+    file.setHasNext(paginator.pageCount() > page + 1);
+    file.setPage(page);
+    file.setPageCount(paginator.pageCount());
+  }
+
+  public void fillFakeFileObject(String filePath, FileResource file, String content) {
+    file.setFilePath(filePath);
+    file.setFileContent(content);
+    file.setHasNext(false);
+    file.setPage(0);
+    file.setPageCount(1);
+  }
+
+  /**
+   * Delete single item
+   */
+  @DELETE
+  @Path("{filePath:.*}")
+  public Response deleteFile(@PathParam("filePath") String filePath) throws IOException, InterruptedException {
+    try {
+      filePath = sanitizeFilePath(filePath);
+      LOG.debug("Deleting file " + filePath);
+      if (getSharedObjectsFactory().getHdfsApi().delete(filePath, false)) {
+        return Response.status(204).build();
+      }
+      throw new NotFoundFormattedException("FileSystem.delete returned false", null);
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Update item
+   */
+  @PUT
+  @Path("{filePath:.*}")
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response updateFile(FileResourceRequest request,
+                             @PathParam("filePath") String filePath) throws IOException, InterruptedException {
+    try {
+      filePath = sanitizeFilePath(filePath);
+      LOG.debug("Rewriting file " + filePath);
+      FSDataOutputStream output = getSharedObjectsFactory().getHdfsApi().create(filePath, true);
+      output.writeBytes(request.file.getFileContent());
+      output.close();
+      return Response.status(204).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Create script
+   */
+  @POST
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response createFile(FileResourceRequest request,
+                             @Context HttpServletResponse response, @Context UriInfo ui)
+      throws IOException, InterruptedException {
+    try {
+      LOG.debug("Creating file " + request.file.getFilePath());
+      try {
+        FSDataOutputStream output = getSharedObjectsFactory().getHdfsApi().create(request.file.getFilePath(), false);
+        if (request.file.getFileContent() != null) {
+          output.writeBytes(request.file.getFileContent());
+        }
+        output.close();
+      } catch (FileAlreadyExistsException ex) {
+        throw new ServiceFormattedException("F020 File already exists", ex, 400);
+      }
+      response.setHeader("Location",
+          String.format("%s/%s", ui.getAbsolutePath().toString(), request.file.getFilePath()));
+      return Response.status(204).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Checks connection to HDFS
+   * @param context View Context
+   */
+  public static void hdfsSmokeTest(ViewContext context) {
+    try {
+      HdfsApi api = HdfsUtil.connectToHDFSApi(context);
+      api.getStatus();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Checks connection to User HomeDirectory
+   * @param context View Context
+   */
+  public static void userhomeSmokeTest(ViewContext context) {
+    try {
+      UserService userservice = new UserService(context);
+      userservice.homeDir();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Wrapper object for json mapping
+   */
+  public static class FileResourceRequest {
+    public FileResource file;
+  }
+
+  private String sanitizeFilePath(String filePath){
+    if (!filePath.startsWith("/") && !filePath.startsWith(".")) {
+      filePath = "/" + filePath;  // some servers strip double slashes in URL
+    }
+    return filePath;
+  }
+}

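For illustration, here is a minimal, hypothetical sketch (not part of this patch) of how a caller could build the two virtual file paths that getFilePage() accepts: a fakefile:// path carries Base64-encoded content served as a single page, while a jsonpath: path points at a remote JSON document whose URL fragment is split on '!' and applied as successive JsonPath expressions (see getJsonPathContentByUrl()). The ATS host and query id below are invented.

import org.apache.commons.codec.binary.Base64;

public class VirtualFilePathSketch {
  public static void main(String[] args) {
    // fakefile:// embeds Base64-encoded content directly in the path; served as one page.
    String content = "SELECT * FROM sample_07 LIMIT 10";
    String fakePath = "fakefile://" + Base64.encodeBase64String(content.getBytes());

    // jsonpath: fetches the URL after the prefix, then applies each '!'-separated
    // fragment piece as a JsonPath expression (here: otherinfo.QUERY, then queryText).
    String jsonPath = "jsonpath:http://ats-host:8188/ws/v1/timeline/HIVE_QUERY_ID/hive_20170103_0001"
        + "#otherinfo.QUERY!queryText";

    System.out.println(fakePath);
    System.out.println(jsonPath);
  }
}
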
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/Aggregator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/Aggregator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/Aggregator.java
new file mode 100644
index 0000000..c70585e
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/Aggregator.java
@@ -0,0 +1,382 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs;
+
+import akka.actor.ActorRef;
+import org.apache.ambari.view.hive20.actor.message.job.SaveDagInformation;
+import org.apache.ambari.view.hive20.persistence.utils.FilteringStrategy;
+import org.apache.ambari.view.hive20.persistence.utils.Indexed;
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive20.persistence.utils.OnlyOwnersFilteringStrategy;
+import org.apache.ambari.view.hive20.resources.IResourceManager;
+import org.apache.ambari.view.hive20.resources.files.FileService;
+import org.apache.ambari.view.hive20.resources.jobs.atsJobs.HiveQueryId;
+import org.apache.ambari.view.hive20.resources.jobs.atsJobs.IATSParser;
+import org.apache.ambari.view.hive20.resources.jobs.atsJobs.TezDagId;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobImpl;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobInfo;
+import org.apache.commons.beanutils.PropertyUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.lang.reflect.InvocationTargetException;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * View Jobs and ATS Jobs aggregator.
+ * There are 4 options:
+ * 1) ATS ExecuteJob without operationId
+ *    *Meaning*: executed outside of HS2
+ *    - ExecuteJob info only from ATS
+ * 2) ATS ExecuteJob with operationId
+ *    a) Hive View ExecuteJob with same operationId is not present
+ *        *Meaning*: executed with HS2
+ *      - ExecuteJob info only from ATS
+ *    b) Hive View ExecuteJob with operationId is present (need to merge)
+ *        *Meaning*: executed with HS2 through Hive View
+ *      - ExecuteJob info merged from ATS and from Hive View DataStorage
+ * 3) ExecuteJob present only in Hive View, ATS does not have it
+ *   *Meaning*: executed through Hive View, but Hadoop ExecuteJob was not created
+ *   this can happen if the user executes a query without aggregation, e.g. just "select * from TABLE"
+ *   - ExecuteJob info only from Hive View
+ */
+public class Aggregator {
+  protected final static Logger LOG =
+    LoggerFactory.getLogger(Aggregator.class);
+
+  private final IATSParser ats;
+  private IResourceManager<Job> viewJobResourceManager;
+  private final ActorRef operationController;
+
+  public Aggregator(IResourceManager<Job> jobResourceManager,
+                    IATSParser ats, ActorRef operationController) {
+    this.viewJobResourceManager = jobResourceManager;
+    this.ats = ats;
+    this.operationController = operationController;
+  }
+
+  /**
+   * Gets all the jobs for 'username' where the job submission time is between 'startTime' (inclusive)
+   * and 'endTime' (exclusive).
+   * Fetches the jobs from ATS and the DB, merges them, updates the DB as needed, and returns the combined list.
+   *
+   * @param username:  username for which jobs have to be fetched.
+   * @param startTime: inclusive, time in secs from epoch
+   * @param endTime:   exclusive, time in secs from epoch
+   * @return: list of jobs
+   */
+  public List<Job> readAllForUserByTime(String username, long startTime, long endTime) {
+    List<HiveQueryId> queryIdList = ats.getHiveQueryIdsForUserByTime(username, startTime, endTime);
+    List<Job> allJobs = fetchDagsAndMergeJobs(queryIdList);
+    List<Job> dbOnlyJobs = readDBOnlyJobs(username, queryIdList, startTime, endTime);
+    allJobs.addAll(dbOnlyJobs);
+
+    return allJobs;
+  }
+
+  /**
+   * Fetches the current state of jobs from ATS and from the DB, merging/updating as required.
+   * @param jobInfos: infos of the jobs to fetch
+   * @return: list of updated jobs
+   */
+  public List<Job> readJobsByIds(List<JobInfo> jobInfos) {
+    //categorize jobs
+    List<String> jobsWithHiveIds = new LinkedList<>();
+    List<String> dbOnlyJobs = new LinkedList<>();
+
+    for (JobInfo jobInfo : jobInfos) {
+      if (null == jobInfo.getHiveId() || jobInfo.getHiveId().trim().isEmpty()) {
+        dbOnlyJobs.add(jobInfo.getJobId());
+      } else {
+        jobsWithHiveIds.add(jobInfo.getHiveId());
+      }
+    }
+
+    List<HiveQueryId> queryIdList = ats.getHiveQueryIdByEntityList(jobsWithHiveIds);
+    List<Job> allJobs = fetchDagsAndMergeJobs(queryIdList);
+    List<Job> dbJobs = readJobsFromDbByJobId(dbOnlyJobs);
+
+    allJobs.addAll(dbJobs);
+    return allJobs;
+  }
+
+  /**
+   * Gets the jobs from the database given their ids.
+   * @param jobsIds: list of job ids
+   * @return: list of all the jobs found
+   */
+  private List<Job> readJobsFromDbByJobId(List<String> jobsIds) {
+    List<Job> jobs = new LinkedList<>();
+    for (final String jid : jobsIds) {
+      try {
+        Job job = getJobFromDbByJobId(jid);
+        jobs.add(job);
+      } catch (ItemNotFound itemNotFound) {
+        LOG.error("Error while finding job with id : {}", jid, itemNotFound);
+      }
+    }
+
+    return jobs;
+  }
+
+  /**
+   * fetches the job from DB given its id
+   * @param jobId: the id of the job to fetch
+   * @return: the job
+   * @throws ItemNotFound: if job with given id is not found in db
+   */
+  private Job getJobFromDbByJobId(final String jobId) throws ItemNotFound {
+    if (null == jobId)
+      return null;
+
+    List<Job> jobs = viewJobResourceManager.readAll(new FilteringStrategy() {
+      @Override
+      public boolean isConform(Indexed item) {
+        return item.getId().equals(jobId);
+      }
+
+      @Override
+      public String whereStatement() {
+        return "id = '" + jobId + "'"; // even IDs are string
+      }
+    });
+
+    if (null != jobs && !jobs.isEmpty())
+      return jobs.get(0);
+
+    throw new ItemNotFound(String.format("Job with id %s not found.", jobId));
+  }
+
+  /**
+   * returns all the jobs from ATS and DB (for this instance) for the given user.
+   * @param username
+   * @return
+   */
+  public List<Job> readAll(String username) {
+    List<HiveQueryId> queries = ats.getHiveQueryIdsForUser(username);
+    LOG.debug("HiveQueryIds fetched : {}", queries);
+    List<Job> allJobs = fetchDagsAndMergeJobs(queries);
+    List<Job> dbOnlyJobs = readDBOnlyJobs(username, queries, null, null);
+    LOG.debug("Jobs only present in DB: {}", dbOnlyJobs);
+    allJobs.addAll(dbOnlyJobs);
+    return allJobs;
+  }
+
+  /**
+   * Reads all the jobs from the DB for 'username', excluding the jobs mentioned in the queries list.
+   * @param username : username for which the jobs are to be read.
+   * @param queries : the jobs to exclude
+   * @param startTime: can be null; if not, the window start time for the jobs
+   * @param endTime: can be null; if not, the window end time for the jobs
+   * @return : the jobs in db that are not in the queries
+   */
+  private List<Job> readDBOnlyJobs(String username, List<HiveQueryId> queries, Long startTime, Long endTime) {
+    List<Job> dbOnlyJobs = new LinkedList<>();
+    HashMap<String, String> operationIdVsHiveId = new HashMap<>();
+
+    for (HiveQueryId hqid : queries) {
+      operationIdVsHiveId.put(hqid.operationId, hqid.entity);
+    }
+    LOG.debug("operationIdVsHiveId : {} ", operationIdVsHiveId);
+    //cover case when operationId is present, but not exists in ATS
+    //e.g. optimized queries without executing jobs, like "SELECT * FROM TABLE"
+    List<Job> jobs = viewJobResourceManager.readAll(new OnlyOwnersFilteringStrategy(username));
+    for (Job job : jobs) {
+      if (null != startTime && null != endTime && null != job.getDateSubmitted()
+        && (job.getDateSubmitted() < startTime || job.getDateSubmitted() >= endTime || operationIdVsHiveId.containsKey(job.getGuid()))
+        ) {
+        continue; // don't include this in the result
+      } else {
+        dbOnlyJobs.add(job);
+      }
+    }
+    return dbOnlyJobs;
+  }
+
+  private List<Job> fetchDagsAndMergeJobs(List<HiveQueryId> queries) {
+    List<Job> allJobs = new LinkedList<Job>();
+
+    for (HiveQueryId atsHiveQuery : queries) {
+      JobImpl atsJob = null;
+      if (hasOperationId(atsHiveQuery)) {
+        try {
+          Job viewJob = getJobByOperationId(atsHiveQuery.operationId);
+          TezDagId atsTezDag = getTezDagFromHiveQueryId(atsHiveQuery);
+          atsJob = mergeHiveAtsTez(atsHiveQuery, atsTezDag, viewJob);
+        } catch (ItemNotFound itemNotFound) {
+          LOG.error("Ignore : {}", itemNotFound.getMessage());
+          continue;
+        }
+      } else {
+        TezDagId atsTezDag = getTezDagFromHiveQueryId(atsHiveQuery);
+        atsJob = atsOnlyJob(atsHiveQuery, atsTezDag);
+      }
+
+      atsJob.setHiveQueryId(atsHiveQuery.entity);
+      allJobs.add(atsJob);
+    }
+
+    return allJobs;
+  }
+
+  /**
+   * @param atsHiveQuery
+   * @param atsTezDag
+   * @param viewJob
+   * @return
+   */
+  private JobImpl mergeHiveAtsTez(HiveQueryId atsHiveQuery, TezDagId atsTezDag, Job viewJob) throws ItemNotFound {
+    saveJobInfoIfNeeded(atsHiveQuery, atsTezDag, viewJob);
+    return mergeAtsJobWithViewJob(atsHiveQuery, atsTezDag, viewJob);
+  }
+
+  public Job readATSJob(Job viewJob) throws ItemNotFound {
+
+    if (viewJob.getStatus().equals(Job.JOB_STATE_INITIALIZED) || viewJob.getStatus().equals(Job.JOB_STATE_UNKNOWN))
+      return viewJob;
+
+    String hexGuid = viewJob.getGuid();
+
+
+    HiveQueryId atsHiveQuery = ats.getHiveQueryIdByOperationId(hexGuid);
+
+    TezDagId atsTezDag = getTezDagFromHiveQueryId(atsHiveQuery);
+
+    saveJobInfoIfNeeded(atsHiveQuery, atsTezDag, viewJob, true);
+    return mergeAtsJobWithViewJob(atsHiveQuery, atsTezDag, viewJob);
+  }
+
+  private TezDagId getTezDagFromHiveQueryId(HiveQueryId atsHiveQuery) {
+    TezDagId atsTezDag;
+    if (atsHiveQuery.version >= HiveQueryId.ATS_15_RESPONSE_VERSION) {
+      atsTezDag = ats.getTezDAGByEntity(atsHiveQuery.entity);
+    } else if (atsHiveQuery.dagNames != null && atsHiveQuery.dagNames.size() > 0) {
+      String dagName = atsHiveQuery.dagNames.get(0);
+
+      atsTezDag = ats.getTezDAGByName(dagName);
+    } else {
+      atsTezDag = new TezDagId();
+    }
+    return atsTezDag;
+  }
+
+  protected boolean hasOperationId(HiveQueryId atsHiveQuery) {
+    return atsHiveQuery.operationId != null;
+  }
+
+  protected JobImpl mergeAtsJobWithViewJob(HiveQueryId atsHiveQuery, TezDagId atsTezDag, Job viewJob) {
+    JobImpl atsJob;
+    try {
+      atsJob = new JobImpl(PropertyUtils.describe(viewJob));
+    } catch (IllegalAccessException e) {
+      LOG.error("Can't instantiate JobImpl", e);
+      return null;
+    } catch (InvocationTargetException e) {
+      LOG.error("Can't instantiate JobImpl", e);
+      return null;
+    } catch (NoSuchMethodException e) {
+      LOG.error("Can't instantiate JobImpl", e);
+      return null;
+    }
+    fillAtsJobFields(atsJob, atsHiveQuery, atsTezDag);
+    return atsJob;
+  }
+
+  protected void saveJobInfoIfNeeded(HiveQueryId hiveQueryId, TezDagId tezDagId, Job viewJob) throws ItemNotFound {
+    saveJobInfoIfNeeded(hiveQueryId, tezDagId, viewJob, false);
+  }
+
+  protected void saveJobInfoIfNeeded(HiveQueryId hiveQueryId, TezDagId tezDagId, Job viewJob, boolean useActorSystem) throws ItemNotFound {
+    boolean updateDb = false;
+    String dagName = null;
+    String dagId = null;
+    String applicationId = null;
+    if (viewJob.getDagName() == null || viewJob.getDagName().isEmpty()) {
+      if (hiveQueryId.dagNames != null && hiveQueryId.dagNames.size() > 0) {
+        dagName = hiveQueryId.dagNames.get(0);
+        updateDb = true;
+      }
+    }
+    if (tezDagId.status != null && (tezDagId.status.compareToIgnoreCase(Job.JOB_STATE_UNKNOWN) != 0) &&
+        !viewJob.getStatus().equalsIgnoreCase(tezDagId.status)) {
+      dagId = tezDagId.entity;
+      applicationId = tezDagId.applicationId;
+      updateDb = true;
+    }
+
+    if(updateDb) {
+      if (useActorSystem) {
+        LOG.info("Saving DAG information via actor system for job id: {}", viewJob.getId());
+        operationController.tell(new SaveDagInformation(viewJob.getId(), dagName, dagId, applicationId), ActorRef.noSender());
+      } else {
+        viewJob.setDagName(dagName);
+        viewJob.setDagId(dagId);
+        viewJob.setApplicationId(applicationId);
+        viewJobResourceManager.update(viewJob, viewJob.getId());
+      }
+    }
+  }
+
+  protected JobImpl atsOnlyJob(HiveQueryId atsHiveQuery, TezDagId atsTezDag) {
+    JobImpl atsJob = new JobImpl();
+    atsJob.setId(atsHiveQuery.entity);
+    fillAtsJobFields(atsJob, atsHiveQuery, atsTezDag);
+
+    String query = atsHiveQuery.query;
+    atsJob.setTitle(query.substring(0, (query.length() > 42) ? 42 : query.length()));
+
+    atsJob.setQueryFile(FileService.JSON_PATH_FILE + atsHiveQuery.url + "#otherinfo.QUERY!queryText");
+    return atsJob;
+  }
+
+  protected JobImpl fillAtsJobFields(JobImpl atsJob, HiveQueryId atsHiveQuery, TezDagId atsTezDag) {
+    atsJob.setApplicationId(atsTezDag.applicationId);
+
+    if (atsHiveQuery.dagNames != null && atsHiveQuery.dagNames.size() > 0)
+      atsJob.setDagName(atsHiveQuery.dagNames.get(0));
+    atsJob.setDagId(atsTezDag.entity);
+    if (atsHiveQuery.starttime != 0)
+      atsJob.setDateSubmitted(atsHiveQuery.starttime);
+    atsJob.setDuration(atsHiveQuery.duration);
+    return atsJob;
+  }
+
+  protected Job getJobByOperationId(final String opId) throws ItemNotFound {
+    List<Job> jobs = viewJobResourceManager.readAll(new FilteringStrategy() {
+      @Override
+      public boolean isConform(Indexed item) {
+        Job opHandle = (Job) item;
+        return opHandle.getGuid().equals(opId);
+      }
+
+      @Override
+      public String whereStatement() {
+        return "guid='" + opId + "'";
+      }
+    });
+
+    if (jobs.size() != 1)
+      throw new ItemNotFound();
+
+    return jobs.get(0);
+  }
+}

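As a rough usage sketch (not part of this patch), the view layer wires an Aggregator the way JobService.getAggregator() does and then asks for a user's recent jobs; the resource manager, ATS parser and operation controller are assumed to come from the surrounding view context.

import java.util.List;

import akka.actor.ActorRef;
import org.apache.ambari.view.hive20.resources.IResourceManager;
import org.apache.ambari.view.hive20.resources.jobs.Aggregator;
import org.apache.ambari.view.hive20.resources.jobs.atsJobs.IATSParser;
import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;

public class AggregatorUsageSketch {
  static List<Job> recentJobsFor(String username, IResourceManager<Job> jobResourceManager,
                                 IATSParser atsParser, ActorRef operationController) {
    Aggregator aggregator = new Aggregator(jobResourceManager, atsParser, operationController);
    // Per the javadoc, start/end are seconds since the epoch: start inclusive, end exclusive.
    long end = System.currentTimeMillis() / 1000;
    long start = end - 3600; // last hour
    // ATS-backed jobs are merged with the view's own records; DB-only jobs
    // (e.g. "select * from TABLE" that never produced a Tez DAG) are appended.
    return aggregator.readAllForUserByTime(username, start, end);
  }
}
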
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/JobResourceProvider.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/JobResourceProvider.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/JobResourceProvider.java
new file mode 100644
index 0000000..6156933
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/JobResourceProvider.java
@@ -0,0 +1,117 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs;
+
+import org.apache.ambari.view.*;
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive20.persistence.utils.OnlyOwnersFilteringStrategy;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.*;
+import org.apache.ambari.view.hive20.utils.SharedObjectsFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import java.lang.reflect.InvocationTargetException;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Resource provider for job
+ */
+public class JobResourceProvider implements ResourceProvider<Job> {
+  @Inject
+  ViewContext context;
+
+  protected JobResourceManager resourceManager = null;
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(JobResourceProvider.class);
+
+  protected synchronized JobResourceManager getResourceManager() {
+    if (resourceManager == null) {
+      resourceManager = new JobResourceManager(new SharedObjectsFactory(context), context);
+    }
+    return resourceManager;
+  }
+
+  @Override
+  public Job getResource(String resourceId, Set<String> properties) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
+    try {
+      return getResourceManager().read(resourceId);
+    } catch (ItemNotFound itemNotFound) {
+      throw new NoSuchResourceException(resourceId);
+    }
+  }
+
+  @Override
+  public Set<Job> getResources(ReadRequest readRequest) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
+    if (context == null) {
+      return new HashSet<Job>();
+    }
+    return new HashSet<Job>(getResourceManager().readAll(
+        new OnlyOwnersFilteringStrategy(this.context.getUsername())));
+  }
+
+  @Override
+  public void createResource(String s, Map<String, Object> stringObjectMap) throws SystemException, ResourceAlreadyExistsException, NoSuchResourceException, UnsupportedPropertyException {
+    Job item = null;
+    try {
+      item = new JobImpl(stringObjectMap);
+    } catch (InvocationTargetException e) {
+      throw new SystemException("error on creating resource", e);
+    } catch (IllegalAccessException e) {
+      throw new SystemException("error on creating resource", e);
+    }
+    getResourceManager().create(item);
+    JobController jobController = new SharedObjectsFactory(context).getJobControllerFactory().createControllerForJob(item);
+    try {
+      jobController.submit();
+    } catch (Throwable throwable) {
+      throw new SystemException("error on creating resource", throwable);
+    }
+  }
+
+  @Override
+  public boolean updateResource(String resourceId, Map<String, Object> stringObjectMap) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
+    Job item = null;
+    try {
+      item = new JobImpl(stringObjectMap);
+    } catch (InvocationTargetException e) {
+      throw new SystemException("error on updating resource", e);
+    } catch (IllegalAccessException e) {
+      throw new SystemException("error on updating resource", e);
+    }
+    try {
+      getResourceManager().update(item, resourceId);
+    } catch (ItemNotFound itemNotFound) {
+      throw new NoSuchResourceException(resourceId);
+    }
+    return true;
+  }
+
+  @Override
+  public boolean deleteResource(String resourceId) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
+    try {
+      getResourceManager().delete(resourceId);
+    } catch (ItemNotFound itemNotFound) {
+      throw new NoSuchResourceException(resourceId);
+    }
+    return true;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/JobService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/JobService.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/JobService.java
new file mode 100644
index 0000000..675ea37
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/JobService.java
@@ -0,0 +1,626 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs;
+
+import akka.actor.ActorRef;
+import com.beust.jcommander.internal.Lists;
+import com.google.common.base.Optional;
+import org.apache.ambari.view.ViewResourceHandler;
+import org.apache.ambari.view.hive20.BaseService;
+import org.apache.ambari.view.hive20.ConnectionFactory;
+import org.apache.ambari.view.hive20.ConnectionSystem;
+import org.apache.ambari.view.hive20.actor.message.job.Failure;
+import org.apache.ambari.view.hive20.backgroundjobs.BackgroundJobController;
+import org.apache.ambari.view.hive20.client.AsyncJobRunner;
+import org.apache.ambari.view.hive20.client.AsyncJobRunnerImpl;
+import org.apache.ambari.view.hive20.client.ColumnDescription;
+import org.apache.ambari.view.hive20.client.Cursor;
+import org.apache.ambari.view.hive20.client.EmptyCursor;
+import org.apache.ambari.view.hive20.client.HiveClientException;
+import org.apache.ambari.view.hive20.client.NonPersistentCursor;
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive20.resources.jobs.atsJobs.IATSParser;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobController;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobImpl;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobInfo;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobResourceManager;
+import org.apache.ambari.view.hive20.utils.MisconfigurationFormattedException;
+import org.apache.ambari.view.hive20.utils.NotFoundFormattedException;
+import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
+import org.apache.ambari.view.hive20.utils.SharedObjectsFactory;
+import org.apache.commons.beanutils.PropertyUtils;
+import org.apache.commons.csv.CSVFormat;
+import org.apache.commons.csv.CSVPrinter;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.StreamingOutput;
+import javax.ws.rs.core.UriInfo;
+import java.io.BufferedWriter;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.lang.reflect.InvocationTargetException;
+import java.sql.SQLException;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.Callable;
+
+/**
+ * Servlet for queries
+ * API:
+ * GET /:id
+ *      read job
+ * POST /
+ *      create new job
+ *      Required: title, queryFile
+ * GET /
+ *      get all Jobs of current user
+ */
+public class JobService extends BaseService {
+  @Inject
+  ViewResourceHandler handler;
+
+  private JobResourceManager resourceManager;
+
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(JobService.class);
+  private Aggregator aggregator;
+
+  protected synchronized JobResourceManager getResourceManager() {
+    if (resourceManager == null) {
+      SharedObjectsFactory connectionsFactory = getSharedObjectsFactory();
+      resourceManager = new JobResourceManager(connectionsFactory, context);
+    }
+    return resourceManager;
+  }
+
+
+  protected Aggregator getAggregator() {
+    if (aggregator == null) {
+      IATSParser atsParser = getSharedObjectsFactory().getATSParser();
+      ActorRef operationController = ConnectionSystem.getInstance().getOperationController(context);
+      aggregator = new Aggregator(getResourceManager(), atsParser, operationController);
+    }
+    return aggregator;
+  }
+
+  protected void setAggregator(Aggregator aggregator) {
+    this.aggregator = aggregator;
+  }
+
+  /**
+   * Get single item
+   */
+  @GET
+  @Path("{jobId}")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getOne(@PathParam("jobId") String jobId) {
+    try {
+      JobController jobController = getResourceManager().readController(jobId);
+
+      Job job = jobController.getJob();
+      if(job.getStatus().equals(Job.JOB_STATE_ERROR) || job.getStatus().equals(Job.JOB_STATE_CANCELED)){
+        ConnectionSystem system = ConnectionSystem.getInstance();
+        final AsyncJobRunner asyncJobRunner = new AsyncJobRunnerImpl(context, system.getOperationController(context), system.getActorSystem());
+        Optional<Failure> error = asyncJobRunner.getError(jobId, context.getUsername());
+
+        if(error.isPresent()){
+          Throwable th = error.get().getError();
+          if(th instanceof SQLException){
+            SQLException sqlException = (SQLException) th;
+            if(sqlException.getSQLState().equals("AUTHFAIL") && ConnectionFactory.isLdapEnabled(context))
+              return Response.status(401).build();
+          }
+          throw new Exception(th);
+        }
+      }
+
+      JSONObject jsonJob = jsonObjectFromJob(jobController);
+      return Response.ok(jsonJob).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (ItemNotFound itemNotFound) {
+      throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  private JSONObject jsonObjectFromJob(JobController jobController) throws IllegalAccessException, NoSuchMethodException, InvocationTargetException {
+    Job hiveJob = jobController.getJobPOJO();
+
+    Job mergedJob;
+    try {
+      mergedJob = getAggregator().readATSJob(hiveJob);
+    } catch (ItemNotFound itemNotFound) {
+      throw new ServiceFormattedException("E010 ExecuteJob not found", itemNotFound);
+    }
+    Map createdJobMap = PropertyUtils.describe(mergedJob);
+    createdJobMap.remove("class"); // no need to show Bean class on client
+
+    JSONObject jobJson = new JSONObject();
+    jobJson.put("job", createdJobMap);
+    return jobJson;
+  }
+
+  /**
+   * Get job results in csv format
+   */
+  @GET
+  @Path("{jobId}/results/csv")
+  @Produces("text/csv")
+  public Response getResultsCSV(@PathParam("jobId") String jobId,
+                                @Context HttpServletResponse response,
+                                @QueryParam("fileName") String fileName,
+                                @QueryParam("columns") final String requestedColumns) {
+    try {
+
+      final String username = context.getUsername();
+
+      ConnectionSystem system = ConnectionSystem.getInstance();
+      final AsyncJobRunner asyncJobRunner = new AsyncJobRunnerImpl(context, system.getOperationController(context), system.getActorSystem());
+
+      Optional<NonPersistentCursor> cursorOptional = asyncJobRunner.resetAndGetCursor(jobId, username);
+
+      if(!cursorOptional.isPresent()){
+        throw new Exception("Download failed");
+      }
+
+      final NonPersistentCursor resultSet = cursorOptional.get();
+
+
+      StreamingOutput stream = new StreamingOutput() {
+        @Override
+        public void write(OutputStream os) throws IOException, WebApplicationException {
+          Writer writer = new BufferedWriter(new OutputStreamWriter(os));
+          CSVPrinter csvPrinter = new CSVPrinter(writer, CSVFormat.DEFAULT);
+          try {
+
+            List<ColumnDescription> descriptions = resultSet.getDescriptions();
+            List<String> headers = Lists.newArrayList();
+            for (ColumnDescription description : descriptions) {
+              headers.add(description.getName());
+            }
+
+            csvPrinter.printRecord(headers.toArray());
+
+            while (resultSet.hasNext()) {
+              csvPrinter.printRecord(resultSet.next().getRow());
+              writer.flush();
+            }
+          } finally {
+            writer.close();
+          }
+        }
+      };
+
+      if (fileName == null || fileName.isEmpty()) {
+        fileName = "results.csv";
+      }
+
+      return Response.ok(stream).
+          header("Content-Disposition", String.format("attachment; filename=\"%s\"", fileName)).
+          build();
+
+
+    } catch (WebApplicationException ex) {
+      throw ex;
+    }  catch (Throwable ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Get job results in csv format
+   */
+  @GET
+  @Path("{jobId}/results/csv/saveToHDFS")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getResultsToHDFS(@PathParam("jobId") String jobId,
+                                   @QueryParam("commence") String commence,
+                                   @QueryParam("file") final String targetFile,
+                                   @QueryParam("stop") final String stop,
+                                   @QueryParam("columns") final String requestedColumns,
+                                   @Context HttpServletResponse response) {
+    try {
+
+      final JobController jobController = getResourceManager().readController(jobId);
+      final String username = context.getUsername();
+
+      String backgroundJobId = "csv" + String.valueOf(jobController.getJob().getId());
+      if (commence != null && commence.equals("true")) {
+        if (targetFile == null)
+          throw new MisconfigurationFormattedException("targetFile should not be empty");
+
+        ConnectionSystem system = ConnectionSystem.getInstance();
+        final AsyncJobRunner asyncJobRunner = new AsyncJobRunnerImpl(context, system.getOperationController(context), system.getActorSystem());
+
+        Optional<NonPersistentCursor> cursorOptional = asyncJobRunner.resetAndGetCursor(jobId, username);
+
+        if(!cursorOptional.isPresent()){
+          throw new Exception("Download failed");
+        }
+
+        final NonPersistentCursor resultSet = cursorOptional.get();
+
+        BackgroundJobController.getInstance(context).startJob(String.valueOf(backgroundJobId), new Runnable() {
+          @Override
+          public void run() {
+
+            try {
+
+              FSDataOutputStream stream = getSharedObjectsFactory().getHdfsApi().create(targetFile, true);
+              Writer writer = new BufferedWriter(new OutputStreamWriter(stream));
+              CSVPrinter csvPrinter = new CSVPrinter(writer, CSVFormat.DEFAULT);
+              try {
+                while (resultSet.hasNext() && !Thread.currentThread().isInterrupted()) {
+                  csvPrinter.printRecord(resultSet.next().getRow());
+                  writer.flush();
+                }
+              } finally {
+                writer.close();
+              }
+              stream.close();
+
+            } catch (IOException e) {
+              throw new ServiceFormattedException("F010 Could not write CSV to HDFS for job#" + jobController.getJob().getId(), e);
+            } catch (InterruptedException e) {
+              throw new ServiceFormattedException("F010 Could not write CSV to HDFS for job#" + jobController.getJob().getId(), e);
+            }
+          }
+        });
+      }
+
+      if (stop != null && stop.equals("true")) {
+        BackgroundJobController.getInstance(context).interrupt(backgroundJobId);
+      }
+
+      JSONObject object = new JSONObject();
+      object.put("stopped", BackgroundJobController.getInstance(context).isInterrupted(backgroundJobId));
+      object.put("jobId", jobController.getJob().getId());
+      object.put("backgroundJobId", backgroundJobId);
+      object.put("operationType", "CSV2HDFS");
+      object.put("status", BackgroundJobController.getInstance(context).state(backgroundJobId).toString());
+
+      return Response.ok(object).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (ItemNotFound itemNotFound) {
+      throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+
+  @Path("{jobId}/status")
+  @GET
+  @Consumes(MediaType.APPLICATION_JSON)
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response fetchJobStatus(@PathParam("jobId") String jobId) throws ItemNotFound, HiveClientException, NoOperationStatusSetException {
+    JobController jobController = getResourceManager().readController(jobId);
+    Job job = jobController.getJob();
+    String jobStatus = job.getStatus();
+
+
+    LOG.info("jobStatus : {} for jobId : {}",jobStatus, jobId);
+
+    JSONObject jsonObject = new JSONObject();
+    jsonObject.put("jobStatus", jobStatus);
+    jsonObject.put("jobId", jobId);
+
+    return Response.ok(jsonObject).build();
+  }
+
+  /**
+   * Get next results page
+   */
+  @GET
+  @Path("{jobId}/results")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getResults(@PathParam("jobId") final String jobId,
+                             @QueryParam("first") final String fromBeginning,
+                             @QueryParam("count") Integer count,
+                             @QueryParam("searchId") String searchId,
+                             @QueryParam("format") String format,
+                             @QueryParam("columns") final String requestedColumns) {
+    try {
+
+      final String username = context.getUsername();
+
+      ConnectionSystem system = ConnectionSystem.getInstance();
+      final AsyncJobRunner asyncJobRunner = new AsyncJobRunnerImpl(context, system.getOperationController(context), system.getActorSystem());
+
+      return ResultsPaginationController.getInstance(context)
+              .request(jobId, searchId, true, fromBeginning, count, format,requestedColumns,
+                      new Callable<Cursor< Row, ColumnDescription >>() {
+                        @Override
+                        public Cursor call() throws Exception {
+                          Optional<NonPersistentCursor> cursor;
+                          if(fromBeginning != null && fromBeginning.equals("true")){
+                            cursor = asyncJobRunner.resetAndGetCursor(jobId, username);
+                          }
+                          else {
+                            cursor = asyncJobRunner.getCursor(jobId, username);
+                          }
+                          if (cursor.isPresent())
+                            return cursor.get();
+                          else
+                            return new EmptyCursor();
+                        }
+                      }).build();
+
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Renew expiration time for results
+   */
+  @GET
+  @Path("{jobId}/results/keepAlive")
+  public Response keepAliveResults(@PathParam("jobId") String jobId,
+                             @QueryParam("first") String fromBeginning,
+                             @QueryParam("count") Integer count) {
+    try {
+      if (!ResultsPaginationController.getInstance(context).keepAlive(jobId, ResultsPaginationController.DEFAULT_SEARCH_ID)) {
+        throw new NotFoundFormattedException("Results already expired", null);
+      }
+      return Response.ok().build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Get progress info
+   */
+  @GET
+  @Path("{jobId}/progress")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getProgress(@PathParam("jobId") String jobId) {
+    try {
+      final JobController jobController = getResourceManager().readController(jobId);
+
+      ProgressRetriever.Progress progress = new ProgressRetriever(jobController.getJob(), getSharedObjectsFactory()).
+          getProgress();
+
+      return Response.ok(progress).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (ItemNotFound itemNotFound) {
+      throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Delete single item
+   */
+  @DELETE
+  @Path("{id}")
+  public Response delete(@PathParam("id") String id,
+                         @QueryParam("remove") final String remove) {
+    try {
+      JobController jobController;
+      try {
+        jobController = getResourceManager().readController(id);
+      } catch (ItemNotFound itemNotFound) {
+        throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+      }
+      jobController.cancel();
+      if (remove != null && remove.compareTo("true") == 0) {
+        getResourceManager().delete(id);
+      }
+      return Response.status(204).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (ItemNotFound itemNotFound) {
+      throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Get all Jobs
+   */
+  @GET
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getList(@QueryParam("startTime") long startTime, @QueryParam("endTime") long endTime) {
+    try {
+
+      LOG.debug("Getting all job: startTime: {}, endTime: {}",startTime,endTime);
+      List<Job> allJobs = getAggregator().readAllForUserByTime(context.getUsername(),startTime, endTime);
+      for(Job job : allJobs) {
+        job.setSessionTag(null);
+      }
+      JSONObject result = new JSONObject();
+      result.put("jobs", allJobs);
+      return Response.ok(result).build();
+    } catch (WebApplicationException ex) {
+      LOG.error("Exception occured while fetching all jobs.", ex);
+      throw ex;
+    } catch (Exception ex) {
+      LOG.error("Exception occured while fetching all jobs.", ex);
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Fetches the jobs with the given info.
+   * Provide as much info about each job as possible so that the API can optimize the fetch process.
+   * @param jobInfos
+   * @return
+   */
+  @Path("/getList")
+  @POST
+  @Produces(MediaType.APPLICATION_JSON)
+  @Consumes(MediaType.APPLICATION_JSON)
+  public List<Job> getList(List<JobInfo> jobInfos) {
+    try {
+      LOG.debug("fetching jobs with ids :{}", jobInfos);
+      List<Job> allJobs = getAggregator().readJobsByIds(jobInfos);
+      for(Job job : allJobs) {
+        job.setSessionTag(null);
+      }
+
+      return allJobs;
+    } catch (WebApplicationException ex) {
+      LOG.error("Exception occured while fetching all jobs.", ex);
+      throw ex;
+    } catch (Exception ex) {
+      LOG.error("Exception occured while fetching all jobs.", ex);
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Create job
+   */
+  @POST
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response create(JobRequest request, @Context HttpServletResponse response,
+                         @Context UriInfo ui) {
+    try {
+      Map jobInfo = PropertyUtils.describe(request.job);
+      Job job = new JobImpl(jobInfo);
+      JobController createdJobController = new JobServiceInternal().createJob(job, getResourceManager());
+      JSONObject jobObject = jsonObjectFromJob(createdJobController);
+      response.setHeader("Location",
+        String.format("%s/%s", ui.getAbsolutePath().toString(), job.getId()));
+      return Response.ok(jobObject).status(201).build();
+    } catch (WebApplicationException ex) {
+      LOG.error("Error occurred while creating job : ",ex);
+      throw ex;
+    } catch (ItemNotFound itemNotFound) {
+      LOG.error("Error occurred while creating job : ",itemNotFound);
+      throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+    } catch (Throwable ex) {
+      LOG.error("Error occurred while creating job : ",ex);
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Remove connection credentials
+   */
+  @DELETE
+  @Path("auth")
+  public Response removePassword() {
+    try {
+      //new UserLocalHiveAuthCredentials().remove(context);
+      //connectionLocal.remove(context);  // force reconnect on next get
+      return Response.ok().status(200).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+
+  /**
+   * Invalidate session
+   */
+  @DELETE
+  @Path("sessions/{sessionTag}")
+  public Response invalidateSession(@PathParam("sessionTag") String sessionTag) {
+    try {
+      //Connection connection = connectionLocal.get(context);
+      //connection.invalidateSessionByTag(sessionTag);
+      return Response.ok().build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Session status
+   */
+  @GET
+  @Path("sessions/{sessionTag}")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response sessionStatus(@PathParam("sessionTag") String sessionTag) {
+    try {
+      //Connection connection = connectionLocal.get(context);
+
+      JSONObject session = new JSONObject();
+      session.put("sessionTag", sessionTag);
+      try {
+        //connection.getSessionByTag(sessionTag);
+        session.put("actual", true);
+      } catch (Exception /*HiveClientException*/ ex) {
+        session.put("actual", false);
+      }
+
+      //TODO: New implementation
+
+      JSONObject status = new JSONObject();
+      status.put("session", session);
+      return Response.ok(status).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Wrapper object for json mapping
+   */
+  public static class JobRequest {
+    public JobImpl job;
+  }
+
+  /**
+   * Wrapper for authentication json mapping
+   */
+  public static class AuthRequest {
+    public String password;
+  }
+}

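A hypothetical client-side sketch (not part of this patch) of the POST / endpoint described in the class javadoc: the body wraps the job in a top-level "job" object, matching JobService.JobRequest, with the required "title" and "queryFile" fields. The view instance URL below is invented and authentication headers are omitted; on success the service answers 201 and sets a Location header pointing at the new job.

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class CreateJobSketch {
  public static void main(String[] args) throws Exception {
    // Payload shape follows JobService.JobRequest { JobImpl job; }.
    String body = "{ \"job\": { \"title\": \"count rows\", "
        + "\"queryFile\": \"/user/admin/hive/jobs/job_1/query.hql\" } }";

    // Hypothetical view instance URL; adjust to the actual Ambari host and instance name.
    URL url = new URL("http://ambari-host:8080/api/v1/views/HIVE/versions/2.0.0/"
        + "instances/AUTO_HIVE20_INSTANCE/resources/jobs");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("POST");
    conn.setRequestProperty("Content-Type", "application/json");
    conn.setDoOutput(true);
    try (OutputStream os = conn.getOutputStream()) {
      os.write(body.getBytes(StandardCharsets.UTF_8));
    }
    System.out.println("HTTP " + conn.getResponseCode()); // expect 201 Created
  }
}
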
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/JobServiceInternal.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/JobServiceInternal.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/JobServiceInternal.java
new file mode 100644
index 0000000..1409ba8
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/JobServiceInternal.java
@@ -0,0 +1,35 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.ambari.view.hive20.resources.jobs;
+
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobController;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobResourceManager;
+
+public class JobServiceInternal {
+  public JobController createJob(Job job, JobResourceManager resourceManager) throws Throwable {
+    resourceManager.create(job);
+
+    JobController createdJobController = resourceManager.readController(job.getId());
+    createdJobController.submit();
+    resourceManager.saveIfModified(createdJobController);
+    return createdJobController;
+  }
+
+}
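
For orientation, a minimal sketch of how JobServiceInternal might be driven, assuming a JobResourceManager is already available (for example via SharedObjectsFactory) and that JobImpl, the Job bean used elsewhere in the view, can be instantiated directly; the title/query setters are omitted here:

    import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
    import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobController;
    import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobImpl;
    import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobResourceManager;

    public class JobSubmissionSketch {
      public String createAndSubmit(JobResourceManager resourceManager) throws Throwable {
        Job job = new JobImpl();  // fields (title, query, ...) would be set here
        JobController controller = new JobServiceInternal().createJob(job, resourceManager);
        // createJob persists the job, submits it, and saves any state changed during submit
        return job.getId();       // id assigned when the job was persisted
      }
    }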

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/ModifyNotificationDelegate.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/ModifyNotificationDelegate.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/ModifyNotificationDelegate.java
new file mode 100644
index 0000000..073cdc7
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/ModifyNotificationDelegate.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs;
+
+public interface ModifyNotificationDelegate {
+  boolean onModification(Object object);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/ModifyNotificationInvocationHandler.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/ModifyNotificationInvocationHandler.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/ModifyNotificationInvocationHandler.java
new file mode 100644
index 0000000..51058f5
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/ModifyNotificationInvocationHandler.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs;
+
+import java.lang.reflect.InvocationHandler;
+import java.lang.reflect.Method;
+
+public class ModifyNotificationInvocationHandler implements InvocationHandler {
+  private Object proxied;
+  private ModifyNotificationDelegate modifyDelegate;
+
+  public ModifyNotificationInvocationHandler(Object proxied, ModifyNotificationDelegate delegate) {
+    this.proxied = proxied;
+    this.modifyDelegate = delegate;
+  }
+
+  @Override
+  public Object invoke(Object o, Method method, Object[] args) throws Throwable {
+    if (method.getName().startsWith("set")) {
+      modifyDelegate.onModification(proxied);
+    }
+    return method.invoke(proxied, args);
+  }
+}
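
A minimal sketch of how this handler is typically combined with java.lang.reflect.Proxy, assuming Job is an interface; the delegate decides what to do when a setter is invoked:

    import java.lang.reflect.Proxy;
    import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;

    public class ModifyNotificationSketch {
      // Returns a Job proxy whose setter calls notify the delegate before reaching the real object.
      public static Job withModificationTracking(final Job job, final ModifyNotificationDelegate delegate) {
        return (Job) Proxy.newProxyInstance(
            job.getClass().getClassLoader(),
            new Class<?>[]{Job.class},
            new ModifyNotificationInvocationHandler(job, delegate));
      }
    }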

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/NoOperationStatusSetException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/NoOperationStatusSetException.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/NoOperationStatusSetException.java
new file mode 100644
index 0000000..31d97d0
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/NoOperationStatusSetException.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs;
+
+
+public class NoOperationStatusSetException extends Exception {
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/ProgressRetriever.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/ProgressRetriever.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/ProgressRetriever.java
new file mode 100644
index 0000000..4d8c7d7
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/ProgressRetriever.java
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs;
+
+import org.apache.ambari.view.hive20.resources.jobs.atsJobs.TezVertexId;
+import org.apache.ambari.view.hive20.resources.jobs.rm.RMParser;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
+import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
+import org.apache.ambari.view.hive20.utils.SharedObjectsFactory;
+
+import java.util.List;
+
+public class ProgressRetriever {
+  private final Progress progress;
+  private final Job job;
+  private final SharedObjectsFactory sharedObjects;
+
+  public ProgressRetriever(Job job, SharedObjectsFactory sharedObjects) {
+    this.job = job;
+    this.sharedObjects = sharedObjects;
+
+    this.progress = new Progress();
+  }
+
+  public Progress getProgress() {
+    jobCheck();
+
+    progress.dagProgress = sharedObjects.getRMParser().getDAGProgress(
+        job.getApplicationId(), job.getDagId());
+
+    List<TezVertexId> vertices = sharedObjects.getATSParser().getVerticesForDAGId(job.getDagId());
+    progress.vertexProgresses = sharedObjects.getRMParser().getDAGVerticesProgress(job.getApplicationId(), job.getDagId(), vertices);
+
+    return progress;
+  }
+
+  public void jobCheck() {
+    if (job.getApplicationId() == null || job.getApplicationId().isEmpty()) {
+      throw new ServiceFormattedException("E070 ApplicationId is not defined yet");
+    }
+    if (job.getDagId() == null || job.getDagId().isEmpty()) {
+      throw new ServiceFormattedException("E080 DagID is not defined yet");
+    }
+  }
+
+  public static class Progress {
+    public Double dagProgress;
+    public List<RMParser.VertexProgress> vertexProgresses;
+  }
+}
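
A short usage sketch, assuming SharedObjectsFactory is constructed from the ViewContext and that the job already has its applicationId and dagId filled in (otherwise getProgress() fails fast with the E070/E080 errors above):

    ProgressRetriever.Progress progress =
        new ProgressRetriever(job, new SharedObjectsFactory(context)).getProgress();
    // progress.dagProgress      - overall DAG progress from the ResourceManager
    // progress.vertexProgresses - per-vertex progress entries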

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/ResultsPaginationController.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/ResultsPaginationController.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/ResultsPaginationController.java
new file mode 100644
index 0000000..6efa2a9
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/ResultsPaginationController.java
@@ -0,0 +1,286 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs;
+
+
+import com.google.common.base.Function;
+import com.google.common.collect.FluentIterable;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.client.ColumnDescription;
+import org.apache.ambari.view.hive20.client.Cursor;
+import org.apache.ambari.view.hive20.client.HiveClientException;
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.utils.BadRequestFormattedException;
+import org.apache.ambari.view.hive20.utils.ResultFetchFormattedException;
+import org.apache.ambari.view.hive20.utils.ResultNotReadyFormattedException;
+import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
+import org.apache.commons.collections4.map.PassiveExpiringMap;
+import org.apache.hadoop.hbase.util.Strings;
+
+import javax.ws.rs.core.Response;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.Callable;
+
+/**
+ * Results Pagination Controller
+ * Persists cursors for result sets
+ */
+public class ResultsPaginationController {
+  public static final String DEFAULT_SEARCH_ID = "default";
+  private static Map<String, ResultsPaginationController> viewSingletonObjects = new HashMap<String, ResultsPaginationController>();
+  public static ResultsPaginationController getInstance(ViewContext context) {
+    if (!viewSingletonObjects.containsKey(context.getInstanceName()))
+      viewSingletonObjects.put(context.getInstanceName(), new ResultsPaginationController());
+    return viewSingletonObjects.get(context.getInstanceName());
+  }
+
+  public ResultsPaginationController() {
+  }
+
+  private static final long EXPIRING_TIME = 10*60*1000;  // 10 minutes
+  private static final int DEFAULT_FETCH_COUNT = 50;
+  private Map<String, Cursor<Row, ColumnDescription>> resultsCache;
+
+  public static class CustomTimeToLiveExpirationPolicy extends PassiveExpiringMap.ConstantTimeToLiveExpirationPolicy<String, Cursor<Row, ColumnDescription>> {
+    public CustomTimeToLiveExpirationPolicy(long timeToLiveMillis) {
+      super(timeToLiveMillis);
+    }
+
+    @Override
+    public long expirationTime(String key, Cursor<Row, ColumnDescription> value) {
+      if (key.startsWith("$")) {
+        return -1;  //never expire
+      }
+      return super.expirationTime(key, value);
+    }
+  }
+
+  private Map<String, Cursor<Row, ColumnDescription>> getResultsCache() {
+    if (resultsCache == null) {
+      PassiveExpiringMap<String, Cursor<Row, ColumnDescription>> resultsCacheExpiringMap =
+          new PassiveExpiringMap<>(new CustomTimeToLiveExpirationPolicy(EXPIRING_TIME));
+      resultsCache = Collections.synchronizedMap(resultsCacheExpiringMap);
+    }
+    return resultsCache;
+  }
+
+  /**
+   * Renew timer of cache entry.
+   * @param key name/id of results request
+   * @param searchId search id within the request; defaults to "default" when null
+   * @return false if entry not found; true if renew was ok
+   */
+  public boolean keepAlive(String key, String searchId) {
+    if (searchId == null)
+      searchId = DEFAULT_SEARCH_ID;
+    String effectiveKey = key + "?" + searchId;
+    if (!getResultsCache().containsKey(effectiveKey)) {
+      return false;
+    }
+    Cursor cursor = getResultsCache().get(effectiveKey);
+    getResultsCache().put(effectiveKey, cursor);
+    cursor.keepAlive();
+    return true;
+  }
+
+  private Cursor<Row, ColumnDescription> getResultsSet(String key, Callable<Cursor<Row, ColumnDescription>> makeResultsSet) {
+    if (!getResultsCache().containsKey(key)) {
+      Cursor resultSet;
+      try {
+        resultSet = makeResultsSet.call();
+        if (resultSet.isResettable()) {
+          resultSet.reset();
+        }
+      } catch (ResultNotReadyFormattedException | ResultFetchFormattedException ex) {
+        throw ex;
+      } catch (Exception ex) {
+        throw new ServiceFormattedException(ex.getMessage(), ex);
+      }
+      getResultsCache().put(key, resultSet);
+    }
+
+    return getResultsCache().get(key);
+  }
+
+  public Response.ResponseBuilder request(String key, String searchId, boolean canExpire, String fromBeginning, Integer count, String format, String requestedColumns, Callable<Cursor<Row, ColumnDescription>> makeResultsSet) throws HiveClientException {
+    if (searchId == null)
+      searchId = DEFAULT_SEARCH_ID;
+    key = key + "?" + searchId;
+    if (!canExpire)
+      key = "$" + key;
+    if (fromBeginning != null && fromBeginning.equals("true") && getResultsCache().containsKey(key)) {
+
+      getResultsCache().remove(key);
+    }
+
+    Cursor<Row, ColumnDescription> resultSet = getResultsSet(key, makeResultsSet);
+
+    if (count == null)
+      count = DEFAULT_FETCH_COUNT;
+
+    List<ColumnDescription> allschema = resultSet.getDescriptions();
+    List<Row> allRowEntries = FluentIterable.from(resultSet)
+      .limit(count).toList();
+
+    List<ColumnDescription> schema = allschema;
+
+    final Set<Integer> selectedColumns = getRequestedColumns(requestedColumns);
+    if (!selectedColumns.isEmpty()) {
+      schema = filter(allschema, selectedColumns);
+    }
+
+    List<Object[]> rows = FluentIterable.from(allRowEntries)
+      .transform(new Function<Row, Object[]>() {
+        @Override
+        public Object[] apply(Row input) {
+          if(!selectedColumns.isEmpty()) {
+            return filter(Lists.newArrayList(input.getRow()), selectedColumns).toArray();
+          } else {
+            return input.getRow();
+          }
+        }
+      }).toList();
+
+    int read = rows.size();
+    if(format != null && format.equalsIgnoreCase("d3")) {
+      List<Map<String,Object>> results = new ArrayList<>();
+      for(int i=0; i<rows.size(); i++) {
+        Object[] row = rows.get(i);
+        Map<String, Object> keyValue = new HashMap<>(row.length);
+        for(int j=0; j<row.length; j++) {
+          //Replace dots in schema with underscore
+          String schemaName = schema.get(j).getName();
+          keyValue.put(schemaName.replace('.','_'), row[j]);
+        }
+        results.add(keyValue);
+      }
+      return Response.ok(results);
+    } else {
+      ResultsResponse resultsResponse = new ResultsResponse();
+      resultsResponse.setSchema(schema);
+      resultsResponse.setRows(rows);
+      resultsResponse.setReadCount(read);
+      resultsResponse.setHasNext(resultSet.hasNext());
+      //      resultsResponse.setSize(resultSet.size());
+      resultsResponse.setOffset(resultSet.getOffset());
+      resultsResponse.setHasResults(true);
+      return Response.ok(resultsResponse);
+    }
+  }
+
+  private <T> List<T> filter(List<T> list, Set<Integer> selectedColumns) {
+    List<T> filtered = Lists.newArrayList();
+    for(int i: selectedColumns) {
+      if(list != null && list.get(i) != null)
+        filtered.add(list.get(i));
+    }
+
+    return filtered;
+  }
+
+  private Set<Integer> getRequestedColumns(String requestedColumns) {
+    if(Strings.isEmpty(requestedColumns)) {
+      return new HashSet<>();
+    }
+    Set<Integer> selectedColumns = Sets.newHashSet();
+    for (String columnRequested : requestedColumns.split(",")) {
+      try {
+        selectedColumns.add(Integer.parseInt(columnRequested));
+      } catch (NumberFormatException ex) {
+        throw new BadRequestFormattedException("Columns param should be comma-separated integers", ex);
+      }
+    }
+    return selectedColumns;
+  }
+
+  private static class ResultsResponse {
+    private List<ColumnDescription> schema;
+    private List<String[]> rows;
+    private int readCount;
+    private boolean hasNext;
+    private long offset;
+    private boolean hasResults;
+
+    public void setSchema(List<ColumnDescription> schema) {
+      this.schema = schema;
+    }
+
+    public List<ColumnDescription> getSchema() {
+      return schema;
+    }
+
+    public void setRows(List<Object[]> rows) {
+      if( null == rows ){
+        this.rows = null;
+        return;  // nothing to copy; avoids NPE on rows.size() below
+      }
+      this.rows = new ArrayList<String[]>(rows.size());
+      for(Object[] row : rows ){
+        String[] strs = new String[row.length];
+        for( int colNum = 0 ; colNum < row.length ; colNum++ ){
+          String value = String.valueOf(row[colNum]);
+          if(row[colNum] != null && (value.isEmpty() || value.equalsIgnoreCase("null"))){
+            strs[colNum] = String.format("\"%s\"",value);
+          }else{
+            strs[colNum] = value;
+          }
+        }
+        this.rows.add(strs);
+      }
+    }
+
+    public List<String[]> getRows() {
+      return rows;
+    }
+
+    public void setReadCount(int readCount) {
+      this.readCount = readCount;
+    }
+
+    public void setHasNext(boolean hasNext) {
+      this.hasNext = hasNext;
+    }
+
+    public boolean isHasNext() {
+      return hasNext;
+    }
+
+    public long getOffset() {
+      return offset;
+    }
+
+    public void setOffset(long offset) {
+      this.offset = offset;
+    }
+
+    public boolean getHasResults() {
+      return hasResults;
+    }
+
+    public void setHasResults(boolean hasResults) {
+      this.hasResults = hasResults;
+    }
+  }
+}
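
For orientation, a hedged sketch of the intended call pattern from a results endpoint: the cursor is cached under key + "?" + searchId, fromBeginning="true" drops the cached cursor, and keys created with canExpire=false are prefixed with '$' so they never expire. getResultsCursor and the context field below are hypothetical:

    import java.util.concurrent.Callable;
    import javax.ws.rs.core.Response;
    import org.apache.ambari.view.hive20.client.ColumnDescription;
    import org.apache.ambari.view.hive20.client.Cursor;
    import org.apache.ambari.view.hive20.client.Row;

    Response fetchPage(final String jobId, String searchId, String fromBeginning, Integer count) throws Exception {
      return ResultsPaginationController.getInstance(context)
          .request(jobId, searchId, true /* canExpire */, fromBeginning, count,
              null /* format: default (non-d3) response */, null /* requestedColumns: all */,
              new Callable<Cursor<Row, ColumnDescription>>() {
                @Override
                public Cursor<Row, ColumnDescription> call() throws Exception {
                  return getResultsCursor(jobId);  // hypothetical: obtain the cursor for this job
                }
              })
          .build();
    }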

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/ATSParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/ATSParser.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/ATSParser.java
new file mode 100644
index 0000000..6e9753d
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/ATSParser.java
@@ -0,0 +1,248 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs.atsJobs;
+
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+import org.json.simple.JSONValue;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * Parser of ATS responses
+ */
+public class ATSParser implements IATSParser {
+  protected final static Logger LOG =
+    LoggerFactory.getLogger(ATSParser.class);
+
+  private ATSRequestsDelegate delegate;
+
+  private static final long MillisInSecond = 1000L;
+
+  public ATSParser(ATSRequestsDelegate delegate) {
+    this.delegate = delegate;
+  }
+
+  /**
+   * returns all HiveQueryIds from ATS for the given user.
+   * @param username the user whose hive queries should be fetched
+   * @return list of HiveQueryIds found for the user
+   */
+  @Override
+  public List<HiveQueryId> getHiveQueryIdsForUser(String username) {
+    JSONObject entities = delegate.hiveQueryIdsForUser(username);
+    return parseHqidJsonFromATS(entities);
+  }
+
+  /**
+   * parses the JSONArray of hive query IDs
+   * @param entities: should contain 'entities' element as JSONArray
+   * @return list of parsed HiveQueryIds
+   */
+  private List<HiveQueryId> parseHqidJsonFromATS(JSONObject entities) {
+    JSONArray jobs = (JSONArray) entities.get("entities");
+
+    return getHqidListFromJsonArray(jobs);
+  }
+
+  /**
+   * parses List of HiveQueryIds from JSON
+   * @param jobs JSONArray of hive query entities returned by ATS
+   * @return list of parsed HiveQueryIds; entities that fail to parse are skipped
+   */
+  private List<HiveQueryId> getHqidListFromJsonArray(JSONArray jobs) {
+    List<HiveQueryId> parsedJobs = new LinkedList<>();
+    for (Object job : jobs) {
+      try {
+        HiveQueryId parsedJob = parseAtsHiveJob((JSONObject) job);
+        parsedJobs.add(parsedJob);
+      } catch (Exception ex) {
+        LOG.error("Error while parsing ATS job", ex);
+      }
+    }
+
+    return parsedJobs;
+  }
+
+  @Override
+  public List<TezVertexId> getVerticesForDAGId(String dagId) {
+    JSONObject entities = delegate.tezVerticesListForDAG(dagId);
+    JSONArray vertices = (JSONArray) entities.get("entities");
+
+    List<TezVertexId> parsedVertices = new LinkedList<TezVertexId>();
+    for(Object vertex : vertices) {
+      try {
+        TezVertexId parsedVertex = parseVertex((JSONObject) vertex);
+        parsedVertices.add(parsedVertex);
+      } catch (Exception ex) {
+        LOG.error("Error while parsing the vertex", ex);
+      }
+    }
+
+    return parsedVertices;
+  }
+
+  @Override
+  public HiveQueryId getHiveQueryIdByOperationId(String guidString) {
+    JSONObject entities = delegate.hiveQueryIdByOperationId(guidString);
+    return getHiveQueryIdFromJson(entities);
+  }
+
+  private HiveQueryId getHiveQueryIdFromJson(JSONObject entities) {
+    JSONArray jobs = (JSONArray) entities.get("entities");
+
+    if (jobs.size() == 0) {
+      return new HiveQueryId();
+    }
+
+    return parseAtsHiveJob((JSONObject) jobs.get(0));
+  }
+
+  /**
+   * returns the hive query entity from ATS; an empty object if not found.
+   *
+   * @param hiveId: the entity id of the hive query
+   * @return: an empty HiveQueryId if not found, else the parsed HiveQueryId
+   */
+  @Override
+  public HiveQueryId getHiveQueryIdByHiveEntityId(String hiveId) {
+    JSONObject entity = delegate.hiveQueryEntityByEntityId(hiveId);
+    return parseAtsHiveJob(entity);
+  }
+
+  @Override
+  public TezDagId getTezDAGByName(String name) {
+    JSONArray tezDagEntities = (JSONArray) delegate.tezDagByName(name).get("entities");
+    return parseTezDag(tezDagEntities);
+  }
+
+  @Override
+  public TezDagId getTezDAGByEntity(String entity) {
+    JSONArray tezDagEntities = (JSONArray) delegate.tezDagByEntity(entity).get("entities");
+    return parseTezDag(tezDagEntities);
+  }
+
+  /**
+   * fetches the HIVE_QUERY_IDs from ATS for the given user within the given time period
+   *
+   * @param username:  username for which to fetch hive query IDs
+   * @param startTime: time in milliseconds, inclusive
+   * @param endTime:   time in milliseconds, exclusive
+   * @return: List of HIVE_QUERY_ID
+   */
+  @Override
+  public List<HiveQueryId> getHiveQueryIdsForUserByTime(String username, long startTime, long endTime) {
+    JSONObject entities = delegate.hiveQueryIdsForUserByTime(username, startTime, endTime);
+    return parseHqidJsonFromATS(entities);
+  }
+
+  @Override
+  public List<HiveQueryId> getHiveQueryIdByEntityList(List<String> hiveIds) {
+    List<HiveQueryId> hiveQueryIds = new LinkedList<>();
+    for (String id : hiveIds) {
+      HiveQueryId hqi = this.getHiveQueryIdByHiveEntityId(id);
+      if (null != hqi.entity) {
+        hiveQueryIds.add(hqi);
+      }
+    }
+    return hiveQueryIds;
+  }
+
+  private TezDagId parseTezDag(JSONArray tezDagEntities) {
+    assert tezDagEntities.size() <= 1;
+    if (tezDagEntities.size() == 0) {
+      return new TezDagId();
+    }
+    JSONObject tezDagEntity = (JSONObject) tezDagEntities.get(0);
+
+    TezDagId parsedDag = new TezDagId();
+    JSONArray applicationIds = (JSONArray) ((JSONObject) tezDagEntity.get("primaryfilters")).get("applicationId");
+    parsedDag.entity = (String) tezDagEntity.get("entity");
+    parsedDag.applicationId = (String) applicationIds.get(0);
+    parsedDag.status = (String) ((JSONObject) tezDagEntity.get("otherinfo")).get("status");
+    return parsedDag;
+  }
+
+  private HiveQueryId parseAtsHiveJob(JSONObject job) {
+    HiveQueryId parsedJob = new HiveQueryId();
+
+    parsedJob.entity = (String) job.get("entity");
+    parsedJob.url = delegate.hiveQueryIdDirectUrl((String) job.get("entity"));
+    parsedJob.starttime = ((Long) job.get("starttime"));
+
+    JSONObject primaryfilters = (JSONObject) job.get("primaryfilters");
+    JSONArray operationIds = (JSONArray) primaryfilters.get("operationid");
+    if (operationIds != null) {
+      parsedJob.operationId = (String) (operationIds).get(0);
+    }
+    JSONArray users = (JSONArray) primaryfilters.get("user");
+    if (users != null) {
+      parsedJob.user = (String) (users).get(0);
+    }
+
+    JSONObject lastEvent = getLastEvent(job);
+    long lastEventTimestamp = ((Long) lastEvent.get("timestamp"));
+
+    parsedJob.duration = (lastEventTimestamp - parsedJob.starttime) / MillisInSecond;
+
+    JSONObject otherinfo = (JSONObject) job.get("otherinfo");
+    if (otherinfo.get("QUERY") != null) {  // workaround for HIVE-10829
+      JSONObject query = (JSONObject) JSONValue.parse((String) otherinfo.get("QUERY"));
+
+      parsedJob.query = (String) query.get("queryText");
+      JSONObject stages = (JSONObject) ((JSONObject) query.get("queryPlan")).get("STAGE PLANS");
+
+      List<String> dagIds = new LinkedList<String>();
+      List<JSONObject> stagesList = new LinkedList<JSONObject>();
+
+      for (Object key : stages.keySet()) {
+        JSONObject stage = (JSONObject) stages.get(key);
+        if (stage.get("Tez") != null) {
+          String dagId = (String) ((JSONObject) stage.get("Tez")).get("DagId:");
+          dagIds.add(dagId);
+        }
+        stagesList.add(stage);
+      }
+      parsedJob.dagNames = dagIds;
+      parsedJob.stages = stagesList;
+    }
+
+    if (otherinfo.get("VERSION") != null) {
+      parsedJob.version = (Long) otherinfo.get("VERSION");
+    }
+    return parsedJob;
+  }
+
+  private TezVertexId parseVertex(JSONObject vertex) {
+    TezVertexId tezVertexId = new TezVertexId();
+    tezVertexId.entity = (String)vertex.get("entity");
+    JSONObject otherinfo = (JSONObject)vertex.get("otherinfo");
+    if (otherinfo != null)
+      tezVertexId.vertexName = (String)otherinfo.get("vertexName");
+    return tezVertexId;
+  }
+
+  private JSONObject getLastEvent(JSONObject atsEntity) {
+    JSONArray events = (JSONArray) atsEntity.get("events");
+    return (JSONObject) events.get(0);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/ATSParserFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/ATSParserFactory.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/ATSParserFactory.java
new file mode 100644
index 0000000..343202e
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/ATSParserFactory.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs.atsJobs;
+
+import org.apache.ambari.view.ViewContext;
+
+public class ATSParserFactory {
+
+  private ViewContext context;
+
+  public ATSParserFactory(ViewContext context) {
+    this.context = context;
+  }
+
+  public ATSParser getATSParser() {
+    ATSRequestsDelegateImpl delegate = new ATSRequestsDelegateImpl(context, getATSUrl(context));
+    return new ATSParser(delegate);
+  }
+
+  public static String getATSUrl(ViewContext context) {
+    return context.getProperties().get("yarn.ats.url");
+  }
+}
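
Putting the factory and parser together, a brief sketch assuming the view instance defines the yarn.ats.url property:

    import java.util.List;

    ATSParser parser = new ATSParserFactory(viewContext).getATSParser();
    List<HiveQueryId> queryIds = parser.getHiveQueryIdsForUser("admin");
    for (HiveQueryId queryId : queryIds) {
      // entity, operationId, query and duration (in seconds) are populated from the ATS response
      System.out.println(queryId.entity + " ran for " + queryId.duration + "s");
    }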

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/ATSRequestsDelegate.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/ATSRequestsDelegate.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/ATSRequestsDelegate.java
new file mode 100644
index 0000000..dac42aa
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/atsJobs/ATSRequestsDelegate.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.jobs.atsJobs;
+
+import org.json.simple.JSONObject;
+
+public interface ATSRequestsDelegate {
+  String hiveQueryIdDirectUrl(String entity);
+
+  String hiveQueryIdOperationIdUrl(String operationId);
+
+  String tezDagDirectUrl(String entity);
+
+  String tezDagNameUrl(String name);
+
+  String tezVerticesListForDAGUrl(String dagId);
+
+  JSONObject hiveQueryIdsForUser(String username);
+
+  JSONObject hiveQueryIdByOperationId(String operationId);
+
+  JSONObject tezDagByName(String name);
+
+  JSONObject tezVerticesListForDAG(String dagId);
+
+  JSONObject tezDagByEntity(String entity);
+
+  JSONObject hiveQueryIdsForUserByTime(String username, long startTime, long endTime);
+
+  JSONObject hiveQueryEntityByEntityId(String hiveEntityId);
+}


[19/20] ambari git commit: AMBARI-19321 : Hive View 2.0 - Minimal view for Hive which includes new UI changes. Also made changes in poms as required (nitirajrathore)

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DatabaseChangeNotifier.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DatabaseChangeNotifier.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DatabaseChangeNotifier.java
new file mode 100644
index 0000000..37f24d2
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DatabaseChangeNotifier.java
@@ -0,0 +1,168 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor;
+
+import akka.actor.ActorRef;
+import akka.actor.PoisonPill;
+import akka.actor.Props;
+import com.google.common.collect.Sets;
+import org.apache.ambari.view.hive20.actor.message.HiveMessage;
+import org.apache.ambari.view.hive20.internal.dto.TableInfo;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Tracks table additions, removals and updates for a single database and notifies the corresponding table change notifiers.
+ */
+public class DatabaseChangeNotifier extends HiveActor {
+  private final Logger LOG = LoggerFactory.getLogger(getClass());
+
+  private String currentDatabaseName;
+  private Map<String, TableWrapper> tables = new HashMap<>();
+  private Map<String, TableInfo> newTables = new HashMap<>();
+
+  @Override
+  public void handleMessage(HiveMessage hiveMessage) {
+    Object message = hiveMessage.getMessage();
+    if(message instanceof DatabaseAdded) {
+      handleDatabaseAdded((DatabaseAdded) message);
+    } else if ( message instanceof DatabaseRemoved) {
+      handleDatabaseRemoved((DatabaseRemoved) message);
+    } else if (message instanceof TableUpdated) {
+      handleTableUpdated((TableUpdated) message);
+    } else if (message instanceof AllTablesUpdated) {
+      handleAllTableUpdated((AllTablesUpdated) message);
+    }
+  }
+
+  private void handleDatabaseAdded(DatabaseAdded message) {
+    LOG.info("Database Added: {}", message.name);
+    currentDatabaseName = message.name;
+    // TODO: Send event to eventbus
+  }
+
+  private void handleDatabaseRemoved(DatabaseRemoved message) {
+    LOG.info("Database Removed: {}", message.name);
+    // TODO: Send event to eventbus
+  }
+
+  private void handleTableUpdated(TableUpdated message) {
+    LOG.info("XXXXX: table xxxx. Size: {}", newTables.size());
+    newTables.put(message.info.getName(), message.info);
+  }
+
+  private void handleAllTableUpdated(AllTablesUpdated message) {
+    Set<String> oldTableNames = new HashSet<>(tables.keySet());
+    Set<String> newTableNames = new HashSet<>(newTables.keySet());
+
+    Set<String> tablesAdded = Sets.difference(newTableNames, oldTableNames);
+    Set<String> tablesRemoved = Sets.difference(oldTableNames, newTableNames);
+    Set<String> tablesUpdated = Sets.intersection(oldTableNames, newTableNames);
+
+    updateTablesAdded(tablesAdded);
+    updateTablesRemoved(tablesRemoved);
+    updateTablesUpdated(tablesUpdated);
+    newTables.clear();
+  }
+
+  private void updateTablesAdded(Set<String> tablesAdded) {
+    for (String tableName: tablesAdded) {
+      TableWrapper wrapper = new TableWrapper(tableName);
+      tables.put(tableName, wrapper);
+      wrapper.getTableNotifier().tell(new TableChangeNotifier.TableAdded(newTables.get(tableName)), getSelf());
+    }
+  }
+
+  private void updateTablesRemoved(Set<String> tablesRemoved) {
+    for(String tableName: tablesRemoved) {
+      TableWrapper tableWrapper = tables.remove(tableName);
+      tableWrapper.getTableNotifier().tell(new TableChangeNotifier.TableRemoved(tableName), getSelf());
+      tableWrapper.getTableNotifier().tell(PoisonPill.getInstance(), getSelf());
+    }
+  }
+
+  private void updateTablesUpdated(Set<String> tablesUpdated) {
+    for(String tableName: tablesUpdated) {
+      TableWrapper tableWrapper = tables.get(tableName);
+      // TODO: Check what needs to be done here.
+    }
+  }
+
+  public static Props props() {
+    return Props.create(DatabaseChangeNotifier.class);
+  }
+
+  public class TableWrapper {
+    private final String tableName;
+    private final ActorRef tableNotifier;
+
+    private TableWrapper(String tableName) {
+      this.tableName = tableName;
+      this.tableNotifier = getContext().actorOf(TableChangeNotifier.props());
+    }
+
+    public String getTableName() {
+      return tableName;
+    }
+
+    public ActorRef getTableNotifier() {
+      return tableNotifier;
+    }
+  }
+
+  public static class DatabaseAdded {
+    private final String name;
+
+    public DatabaseAdded(String name) {
+      this.name = name;
+    }
+  }
+
+
+  public static class DatabaseRemoved {
+    private final String name;
+
+    public DatabaseRemoved(String name) {
+      this.name = name;
+    }
+  }
+
+  public static class TableUpdated {
+    private final TableInfo info;
+
+    public TableUpdated(TableInfo info) {
+      this.info = info;
+    }
+  }
+
+  public static class AllTablesUpdated {
+    private final String database;
+
+    public AllTablesUpdated(String database) {
+      this.database = database;
+    }
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DatabaseManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DatabaseManager.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DatabaseManager.java
new file mode 100644
index 0000000..6dc4ad9
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DatabaseManager.java
@@ -0,0 +1,285 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor;
+
+import akka.actor.ActorRef;
+import akka.actor.PoisonPill;
+import akka.actor.Props;
+import com.google.common.base.Function;
+import com.google.common.collect.FluentIterable;
+import com.google.common.collect.Sets;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.AuthParams;
+import org.apache.ambari.view.hive20.ConnectionFactory;
+import org.apache.ambari.view.hive20.actor.message.HiveMessage;
+import org.apache.ambari.view.hive20.client.ConnectionConfig;
+import org.apache.ambari.view.hive20.internal.Connectable;
+import org.apache.ambari.view.hive20.internal.HiveConnectionWrapper;
+import org.apache.ambari.view.hive20.internal.dto.DatabaseInfo;
+import org.apache.ambari.view.hive20.internal.dto.TableInfo;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import scala.concurrent.duration.Duration;
+
+import javax.annotation.Nullable;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Manages database related state, queries Hive to get the list of databases and then manages state for each database.
+ * Also, periodically updates the list of databases by calling hive.
+ */
+public class DatabaseManager extends HiveActor {
+
+  private final Logger LOG = LoggerFactory.getLogger(getClass());
+
+  private final Connectable connectable;
+
+  private final ActorRef metaDataRetriever;
+  private final String username;
+
+  private boolean refreshInProgress = false;
+  private boolean selfRefreshQueued = false;
+
+  private Map<String, DatabaseWrapper> databases = new HashMap<>();
+  private Set<String> databasesToUpdate;
+
+
+  public DatabaseManager(String username, Connectable connectable) {
+    this.username = username;
+    this.connectable = connectable;
+    metaDataRetriever = getContext().actorOf(MetaDataRetriever.props(connectable));
+  }
+
+  @Override
+  public void handleMessage(HiveMessage hiveMessage) {
+
+    Object message = hiveMessage.getMessage();
+    if (message instanceof Refresh) {
+      handleRefresh();
+    } else if (message instanceof SelfRefresh) {
+      handleSelfRefresh();
+    } else if (message instanceof MetaDataRetriever.DBRefreshed) {
+      handleDBRefreshed((MetaDataRetriever.DBRefreshed) message);
+    } else if (message instanceof MetaDataRetriever.TableRefreshed) {
+      handleTableRefreshed((MetaDataRetriever.TableRefreshed) message);
+    } else if (message instanceof MetaDataRetriever.AllTableRefreshed) {
+      handleAllTableRefreshed((MetaDataRetriever.AllTableRefreshed) message);
+    } else if (message instanceof GetDatabases) {
+      handleGetDatabases((GetDatabases) message);
+    }
+
+  }
+
+  private void handleSelfRefresh() {
+    if (refreshInProgress) {
+      getContext().system().scheduler().scheduleOnce(Duration.create(500, TimeUnit.MILLISECONDS),
+          getSelf(), new SelfRefresh(), getContext().dispatcher(), getSelf());
+    } else {
+      selfRefreshQueued = false;
+      refresh();
+    }
+  }
+
+  private void handleRefresh() {
+    if (refreshInProgress && selfRefreshQueued) {
+      return; // We will not honor refresh message when a refresh is going on and another self refresh is queued in mailbox
+    } else if (refreshInProgress) {
+      selfRefreshQueued = true; // If refresh is in progress, we will queue up only one refresh message.
+      getContext().system().scheduler().scheduleOnce(Duration.create(500, TimeUnit.MILLISECONDS),
+          getSelf(), new SelfRefresh(), getContext().dispatcher(), getSelf());
+    } else {
+      refresh();
+    }
+  }
+
+  private void handleDBRefreshed(MetaDataRetriever.DBRefreshed message) {
+    Set<DatabaseInfo> databasesInfos = message.getDatabases();
+    Set<String> currentDatabases = new HashSet<>(databases.keySet());
+    Set<String> newDatabases = FluentIterable.from(databasesInfos).transform(new Function<DatabaseInfo, String>() {
+      @Nullable
+      @Override
+      public String apply(@Nullable DatabaseInfo databaseInfo) {
+        return databaseInfo.getName();
+      }
+    }).toSet();
+
+    databasesToUpdate = new HashSet<>(newDatabases);
+
+    Set<String> databasesAdded = Sets.difference(newDatabases, currentDatabases);
+    Set<String> databasesRemoved = Sets.difference(currentDatabases, newDatabases);
+
+    updateDatabasesAdded(databasesAdded, databasesInfos);
+    updateDatabasesRemoved(databasesRemoved);
+  }
+
+  private void updateDatabasesAdded(Set<String> databasesAdded, Set<DatabaseInfo> databasesInfos) {
+    for (DatabaseInfo info : databasesInfos) {
+      if (databasesAdded.contains(info.getName())) {
+        DatabaseWrapper wrapper = new DatabaseWrapper(info);
+        databases.put(info.getName(), wrapper);
+        wrapper.getDatabaseNotifier().tell(new DatabaseChangeNotifier.DatabaseAdded(info.getName()), getSelf());
+      }
+    }
+  }
+
+  private void updateDatabasesRemoved(Set<String> databasesRemoved) {
+    for (String database : databasesRemoved) {
+      DatabaseWrapper wrapper = databases.remove(database);
+      ActorRef notifier = wrapper.getDatabaseNotifier();
+      notifier.tell(new DatabaseChangeNotifier.DatabaseRemoved(database), getSelf());
+      notifier.tell(PoisonPill.getInstance(), getSelf());
+    }
+  }
+
+  private void handleTableRefreshed(MetaDataRetriever.TableRefreshed message) {
+    ActorRef databaseChangeNotifier = getDatabaseChangeNotifier(message.getDatabase());
+    updateTable(message.getDatabase(), message.getTable());
+    databaseChangeNotifier.tell(new DatabaseChangeNotifier.TableUpdated(message.getTable()), getSelf());
+  }
+
+  private void handleAllTableRefreshed(MetaDataRetriever.AllTableRefreshed message) {
+    ActorRef databaseChangeNotifier = getDatabaseChangeNotifier(message.getDatabase());
+    databaseChangeNotifier.tell(new DatabaseChangeNotifier.AllTablesUpdated(message.getDatabase()), getSelf());
+    if (checkIfAllTablesOfAllDatabaseRefreshed(message)) {
+      refreshInProgress = false;
+    }
+  }
+
+  private void handleGetDatabases(GetDatabases message) {
+    if (refreshInProgress) {
+      // If currently refreshing, then schedule the same message after 500 milliseconds
+      getContext().system().scheduler().scheduleOnce(Duration.create(500, TimeUnit.MILLISECONDS),
+          getSelf(), message, getContext().dispatcher(), getSender());
+      return;
+    }
+    Set<DatabaseInfo> infos = new HashSet<>();
+    for (DatabaseWrapper wrapper : databases.values()) {
+      infos.add(wrapper.getDatabase());
+    }
+    getSender().tell(new DatabasesResult(infos), getSelf());
+  }
+
+  private boolean checkIfAllTablesOfAllDatabaseRefreshed(MetaDataRetriever.AllTableRefreshed message) {
+    databasesToUpdate.remove(message.getDatabase());
+    return databasesToUpdate.isEmpty();
+  }
+
+  private ActorRef getDatabaseChangeNotifier(String databaseName) {
+    DatabaseWrapper wrapper = databases.get(databaseName);
+    ActorRef databaseChangeNotifier = null;
+    if (wrapper != null) {
+      databaseChangeNotifier = wrapper.getDatabaseNotifier();
+    }
+    return databaseChangeNotifier;
+  }
+
+  private void refresh() {
+    LOG.info("Received refresh for user");
+    refreshInProgress = true;
+    metaDataRetriever.tell(new MetaDataRetriever.RefreshDB(), getSelf());
+
+    scheduleRefreshAfter(1, TimeUnit.MINUTES);
+  }
+
+  private void scheduleRefreshAfter(long time, TimeUnit timeUnit) {
+    getContext().system().scheduler().scheduleOnce(Duration.create(time, timeUnit),
+        getSelf(), new Refresh(username), getContext().dispatcher(), getSelf());
+  }
+
+  @Override
+  public void postStop() throws Exception {
+    LOG.info("Database Manager stopped!!!");
+    connectable.disconnect();
+  }
+
+  private void updateTable(String databaseName, TableInfo table) {
+    DatabaseWrapper wrapper = databases.get(databaseName);
+    if (wrapper != null) {
+      DatabaseInfo info = wrapper.getDatabase();
+      info.getTables().add(table);
+    }
+  }
+
+  public static Props props(ViewContext context) {
+    ConnectionConfig config = ConnectionFactory.create(context);
+    Connectable connectable = new HiveConnectionWrapper(config.getJdbcUrl(), config.getUsername(), config.getPassword(), new AuthParams(context));
+    return Props.create(DatabaseManager.class, config.getUsername(), connectable);
+  }
+
+  public static class Refresh {
+    private final String username;
+
+    public Refresh(String username) {
+      this.username = username;
+    }
+
+    public String getUsername() {
+      return username;
+    }
+  }
+
+  private static class SelfRefresh {
+  }
+
+  private class DatabaseWrapper {
+    private final DatabaseInfo database;
+    private final ActorRef databaseNotifier;
+
+    private DatabaseWrapper(DatabaseInfo database) {
+      this.database = database;
+      databaseNotifier = getContext().actorOf(DatabaseChangeNotifier.props());
+    }
+
+    public DatabaseInfo getDatabase() {
+      return database;
+    }
+
+    public ActorRef getDatabaseNotifier() {
+      return databaseNotifier;
+    }
+  }
+
+  public static class GetDatabases {
+    private final String username;
+
+    public GetDatabases(String username) {
+      this.username = username;
+    }
+
+    public String getUsername() {
+      return username;
+    }
+  }
+
+  public static class DatabasesResult {
+    private final Set<DatabaseInfo> databases;
+
+    public DatabasesResult(Set<DatabaseInfo> databases) {
+      this.databases = databases;
+    }
+
+    public Set<DatabaseInfo> getDatabases() {
+      return databases;
+    }
+  }
+}
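
A rough sketch of how a caller might start the manager and read back its cached database list with Akka's ask pattern; GetDatabases is re-scheduled internally while a refresh is in flight, so the timeout should be generous. The actor system name below is illustrative:

    import java.util.concurrent.TimeUnit;
    import akka.actor.ActorRef;
    import akka.actor.ActorSystem;
    import akka.pattern.Patterns;
    import akka.util.Timeout;
    import scala.concurrent.Await;
    import scala.concurrent.duration.Duration;

    ActorSystem system = ActorSystem.create("HiveViewActorSystem");
    ActorRef databaseManager = system.actorOf(DatabaseManager.props(viewContext));

    databaseManager.tell(new DatabaseManager.Refresh("admin"), ActorRef.noSender());

    Timeout timeout = new Timeout(Duration.create(30, TimeUnit.SECONDS));
    DatabaseManager.DatabasesResult result = (DatabaseManager.DatabasesResult) Await.result(
        Patterns.ask(databaseManager, new DatabaseManager.GetDatabases("admin"), timeout),
        timeout.duration());
    // result.getDatabases() returns the DatabaseInfo set currently known to the manager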

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DeathWatch.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DeathWatch.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DeathWatch.java
new file mode 100644
index 0000000..58cefcd
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/DeathWatch.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor;
+
+import akka.actor.ActorRef;
+import akka.actor.Terminated;
+import org.apache.ambari.view.hive20.actor.message.HiveMessage;
+import org.apache.ambari.view.hive20.actor.message.RegisterActor;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Date;
+
+public class DeathWatch extends HiveActor {
+
+    private final static Logger LOG =
+            LoggerFactory.getLogger(DeathWatch.class);
+
+    @Override
+    public void handleMessage(HiveMessage hiveMessage) {
+        Object message = hiveMessage.getMessage();
+        if(message instanceof RegisterActor){
+            RegisterActor registerActor = (RegisterActor) message;
+            ActorRef actorRef = registerActor.getActorRef();
+            this.getContext().watch(actorRef);
+            LOG.info("Registered new actor "+ actorRef);
+            LOG.info("Registration for {} at {}", actorRef,new Date());
+        }
+        if(message instanceof Terminated){
+            Terminated terminated = (Terminated) message;
+            ActorRef actor = terminated.actor();
+            LOG.info("Received terminate for actor "+ actor);
+            LOG.info("Termination for {} at {}", actor,new Date());
+
+        }
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/HiveActor.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/HiveActor.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/HiveActor.java
new file mode 100644
index 0000000..384b798
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/HiveActor.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor;
+
+import akka.actor.UntypedActor;
+import org.apache.ambari.view.hive20.actor.message.HiveMessage;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public abstract class HiveActor extends UntypedActor {
+
+    private final Logger LOG = LoggerFactory.getLogger(getClass());
+
+    @Override
+    final public void onReceive(Object message) throws Exception {
+        HiveMessage hiveMessage = new HiveMessage(message);
+        if(LOG.isDebugEnabled()){
+            LOG.debug("Received message: " + message.getClass().getName() + ", generated id: " + hiveMessage.getId() +
+                    " sent by: " + sender() + ", recieved by" + self());
+        }
+
+        handleMessage(hiveMessage);
+
+        if(LOG.isDebugEnabled()){
+            LOG.debug("Message submitted: " + hiveMessage.getId());
+
+        }
+    }
+
+    public abstract void handleMessage(HiveMessage hiveMessage);
+
+
+
+}
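
Concrete actors in this view extend HiveActor and only implement handleMessage; a trivial subclass sketch (EchoActor is purely illustrative):

    import org.apache.ambari.view.hive20.actor.message.HiveMessage;

    public class EchoActor extends HiveActor {
      @Override
      public void handleMessage(HiveMessage hiveMessage) {
        Object message = hiveMessage.getMessage();
        if (message instanceof String) {
          // HiveActor.onReceive has already logged the generated message id; just reply here
          getSender().tell("echo: " + message, getSelf());
        }
      }
    }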

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/JdbcConnector.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/JdbcConnector.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/JdbcConnector.java
new file mode 100644
index 0000000..ce58c8c
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/JdbcConnector.java
@@ -0,0 +1,642 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor;
+
+import akka.actor.ActorRef;
+import akka.actor.Cancellable;
+import akka.actor.PoisonPill;
+import akka.actor.Props;
+import com.google.common.base.Optional;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.AuthParams;
+import org.apache.ambari.view.hive20.ConnectionDelegate;
+import org.apache.ambari.view.hive20.actor.message.Connect;
+import org.apache.ambari.view.hive20.actor.message.FetchError;
+import org.apache.ambari.view.hive20.actor.message.FetchResult;
+import org.apache.ambari.view.hive20.actor.message.GetColumnMetadataJob;
+import org.apache.ambari.view.hive20.actor.message.HiveJob;
+import org.apache.ambari.view.hive20.actor.message.HiveMessage;
+import org.apache.ambari.view.hive20.actor.message.ResultInformation;
+import org.apache.ambari.view.hive20.actor.message.ResultNotReady;
+import org.apache.ambari.view.hive20.actor.message.RunStatement;
+import org.apache.ambari.view.hive20.actor.message.SQLStatementJob;
+import org.apache.ambari.view.hive20.actor.message.job.CancelJob;
+import org.apache.ambari.view.hive20.actor.message.job.ExecuteNextStatement;
+import org.apache.ambari.view.hive20.actor.message.job.ExecutionFailed;
+import org.apache.ambari.view.hive20.actor.message.job.Failure;
+import org.apache.ambari.view.hive20.actor.message.job.NoResult;
+import org.apache.ambari.view.hive20.actor.message.job.ResultSetHolder;
+import org.apache.ambari.view.hive20.actor.message.job.SaveDagInformation;
+import org.apache.ambari.view.hive20.actor.message.job.SaveGuidToDB;
+import org.apache.ambari.view.hive20.actor.message.lifecycle.CleanUp;
+import org.apache.ambari.view.hive20.actor.message.lifecycle.DestroyConnector;
+import org.apache.ambari.view.hive20.actor.message.lifecycle.FreeConnector;
+import org.apache.ambari.view.hive20.actor.message.lifecycle.InactivityCheck;
+import org.apache.ambari.view.hive20.actor.message.lifecycle.KeepAlive;
+import org.apache.ambari.view.hive20.actor.message.lifecycle.TerminateInactivityCheck;
+import org.apache.ambari.view.hive20.internal.Connectable;
+import org.apache.ambari.view.hive20.internal.ConnectionException;
+import org.apache.ambari.view.hive20.persistence.Storage;
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobImpl;
+import org.apache.ambari.view.hive20.utils.HiveActorConfiguration;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.hive.jdbc.HiveConnection;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import scala.concurrent.duration.Duration;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayDeque;
+import java.util.Queue;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+
+
+/**
+ * Wraps one JDBC connection per user, per instance. It executes the submitted statements and
+ * creates child actors that handle ResultSet extraction, YARN/ATS querying for ExecuteJob info, and log aggregation.
+ */
+public class JdbcConnector extends HiveActor {
+
+  private final Logger LOG = LoggerFactory.getLogger(getClass());
+
+  public static final String SUFFIX = "validating the login";
+
+  /**
+   * Interval for maximum inactivity allowed
+   */
+  private final static long MAX_INACTIVITY_INTERVAL = 5 * 60 * 1000;
+
+  /**
+   * Interval for maximum inactivity allowed before termination
+   */
+  private static final long MAX_TERMINATION_INACTIVITY_INTERVAL = 10 * 60 * 1000;
+
+  private static final long MILLIS_IN_SECOND = 1000L;
+
+  private final Storage storage;
+
+  /**
+   * Keeps track of the timestamp when the last activity has happened. This is
+   * used to calculate the inactivity period and take lifecycle decisions based
+   * on it.
+   */
+  private long lastActivityTimestamp;
+
+  /**
+   * Akka scheduler to tick at an interval to deal with inactivity of this actor
+   */
+  private Cancellable inactivityScheduler;
+
+  /**
+   * Akka scheduler to tick at an interval to deal with the inactivity after which
+   * the actor should be killed and connection should be released
+   */
+  private Cancellable terminateActorScheduler;
+
+  private Connectable connectable = null;
+  private final ActorRef deathWatch;
+  private final ConnectionDelegate connectionDelegate;
+  private final ActorRef parent;
+  private ActorRef statementExecutor = null;
+  private final HdfsApi hdfsApi;
+  private final AuthParams authParams;
+
+  /**
+   * true if the actor is currently executing any job.
+   */
+  private boolean executing = false;
+  private HiveJob.Type executionType = HiveJob.Type.SYNC;
+
+  /**
+   * Holds the timeout configurations.
+   */
+  private final HiveActorConfiguration actorConfiguration;
+  private String username;
+  private Optional<String> jobId = Optional.absent();
+  private Optional<String> logFile = Optional.absent();
+  private int statementsCount = 0;
+
+  private ActorRef commandSender = null;
+
+  private ActorRef resultSetIterator = null;
+  private boolean isFailure = false;
+  private Failure failure = null;
+  private boolean isCancelCalled = false;
+
+  /**
+   * For every execution, this will hold the statements that are left to execute
+   */
+  private Queue<String> statementQueue = new ArrayDeque<>();
+
+  public JdbcConnector(ViewContext viewContext, ActorRef parent, ActorRef deathWatch, HdfsApi hdfsApi,
+                       ConnectionDelegate connectionDelegate, Storage storage) {
+    this.hdfsApi = hdfsApi;
+    this.parent = parent;
+    this.deathWatch = deathWatch;
+    this.connectionDelegate = connectionDelegate;
+    this.storage = storage;
+    this.lastActivityTimestamp = System.currentTimeMillis();
+    resultSetIterator = null;
+
+    authParams = new AuthParams(viewContext);
+    actorConfiguration = new HiveActorConfiguration(viewContext);
+  }
+
+  @Override
+  public void handleMessage(HiveMessage hiveMessage) {
+    Object message = hiveMessage.getMessage();
+    if (message instanceof InactivityCheck) {
+      checkInactivity();
+    } else if (message instanceof TerminateInactivityCheck) {
+      checkTerminationInactivity();
+    } else if (message instanceof KeepAlive) {
+      keepAlive();
+    } else if (message instanceof CleanUp) {
+      cleanUp();
+    } else {
+      handleNonLifecycleMessage(hiveMessage);
+    }
+  }
+
+  private void handleNonLifecycleMessage(HiveMessage hiveMessage) {
+    Object message = hiveMessage.getMessage();
+    keepAlive();
+    if (message instanceof Connect) {
+      connect((Connect) message);
+    } else if (message instanceof SQLStatementJob) {
+      runStatementJob((SQLStatementJob) message);
+    } else if (message instanceof GetColumnMetadataJob) {
+      runGetMetaData((GetColumnMetadataJob) message);
+    } else if (message instanceof ExecuteNextStatement) {
+      executeNextStatement();
+    } else if (message instanceof ResultInformation) {
+      gotResultBack((ResultInformation) message);
+    } else if (message instanceof CancelJob) {
+      cancelJob((CancelJob) message);
+    } else if (message instanceof FetchResult) {
+      fetchResult((FetchResult) message);
+    } else if (message instanceof FetchError) {
+      fetchError((FetchError) message);
+    } else if (message instanceof SaveGuidToDB) {
+      saveGuid((SaveGuidToDB) message);
+    } else if (message instanceof SaveDagInformation) {
+      saveDagInformation((SaveDagInformation) message);
+    } else {
+      unhandled(message);
+    }
+  }
+
+  private void fetchError(FetchError message) {
+    if (isFailure) {
+      sender().tell(Optional.of(failure), self());
+      return;
+    }
+    sender().tell(Optional.absent(), self());
+  }
+
+  private void fetchResult(FetchResult message) {
+    if (isFailure) {
+      sender().tell(failure, self());
+      return;
+    }
+
+    if (executing) {
+      sender().tell(new ResultNotReady(jobId.get(), username), self());
+      return;
+    }
+    sender().tell(Optional.fromNullable(resultSetIterator), self());
+  }
+
+  private void cancelJob(CancelJob message) {
+    if (!executing || connectionDelegate == null) {
+      LOG.error("Cannot cancel the job as it is not currently running or has not started. JobId: {}", message.getJobId());
+      return;
+    }
+    LOG.info("Cancelling job for user. JobId: {}, user: {}", message.getJobId(), username);
+    try {
+      isCancelCalled = true;
+      connectionDelegate.cancel();
+    } catch (SQLException e) {
+      LOG.error("Failed to cancel job. JobId: {}. {}", message.getJobId(), e);
+    }
+  }
+
+  private void gotResultBack(ResultInformation message) {
+    Optional<Failure> failureOptional = message.getFailure();
+    if (failureOptional.isPresent()) {
+      Failure failure = failureOptional.get();
+      processFailure(failure);
+      return;
+    }
+    if (statementQueue.size() == 0) {
+      // This is the last resultSet
+      processResult(message.getResultSet());
+    } else {
+      self().tell(new ExecuteNextStatement(), self());
+    }
+  }
+
+  private void processCancel() {
+    executing = false;
+    if (isAsync() && jobId.isPresent()) {
+      LOG.error("Job canceled by user for JobId: {}", jobId.get());
+      updateJobStatus(jobId.get(), Job.JOB_STATE_CANCELED);
+    }
+  }
+
+  private void processFailure(Failure failure) {
+    executing = false;
+    isFailure = true;
+    this.failure = failure;
+    if (isAsync() && jobId.isPresent()) {
+      if(isCancelCalled) {
+        processCancel();
+        return;
+      }
+      updateJobStatus(jobId.get(), Job.JOB_STATE_ERROR);
+    } else {
+      // Send for sync execution
+      commandSender.tell(new ExecutionFailed(failure.getMessage(), failure.getError()), self());
+      cleanUpWithTermination();
+    }
+  }
+
+  private void processResult(Optional<ResultSet> resultSetOptional) {
+    executing = false;
+
+    LOG.info("Finished processing SQL statements for Job id : {}", jobId.or("SYNC JOB"));
+    if (isAsync() && jobId.isPresent()) {
+      updateJobStatus(jobId.get(), Job.JOB_STATE_FINISHED);
+    }
+
+    if (resultSetOptional.isPresent()) {
+      ActorRef resultSetActor = getContext().actorOf(Props.create(ResultSetIterator.class, self(),
+        resultSetOptional.get(), isAsync()).withDispatcher("akka.actor.result-dispatcher"),
+        "ResultSetIterator:" + UUID.randomUUID().toString());
+      resultSetIterator = resultSetActor;
+      if (!isAsync()) {
+        commandSender.tell(new ResultSetHolder(resultSetActor), self());
+      }
+    } else {
+      resultSetIterator = null;
+      if (!isAsync()) {
+        commandSender.tell(new NoResult(), self());
+      }
+    }
+  }
+
+  private void executeNextStatement() {
+    if (statementQueue.isEmpty()) {
+      jobExecutionCompleted();
+      return;
+    }
+
+    int index = statementsCount - statementQueue.size();
+    String statement = statementQueue.poll();
+    if (statementExecutor == null) {
+      statementExecutor = getStatementExecutor();
+    }
+
+    if (isAsync()) {
+      statementExecutor.tell(new RunStatement(index, statement, jobId.get(), true, logFile.get(), true), self());
+    } else {
+      statementExecutor.tell(new RunStatement(index, statement), self());
+    }
+  }
+
+  private void runStatementJob(SQLStatementJob message) {
+    executing = true;
+    jobId = message.getJobId();
+    logFile = message.getLogFile();
+    executionType = message.getType();
+    commandSender = getSender();
+
+    resetToInitialState();
+
+    if (!checkConnection()) return;
+
+    for (String statement : message.getStatements()) {
+      statementQueue.add(statement);
+    }
+    statementsCount = statementQueue.size();
+
+    if (isAsync() && jobId.isPresent()) {
+      updateJobStatus(jobId.get(), Job.JOB_STATE_RUNNING);
+      startInactivityScheduler();
+    }
+    self().tell(new ExecuteNextStatement(), self());
+  }
+
+  public boolean checkConnection() {
+    if (connectable == null) {
+      notifyConnectFailure(new SQLException("Hive connection is not created"));
+      return false;
+    }
+
+    Optional<HiveConnection> connectionOptional = connectable.getConnection();
+    if (!connectionOptional.isPresent()) {
+      SQLException sqlException = connectable.isUnauthorized() ? new SQLException("Hive Connection not Authorized", "AUTHFAIL")
+              : new SQLException("Hive connection is not created");
+      notifyConnectFailure(sqlException);
+      return false;
+    }
+    return true;
+  }
+
+  private void runGetMetaData(GetColumnMetadataJob message) {
+    if (!checkConnection()) return;
+    resetToInitialState();
+    executing = true;
+    executionType = message.getType();
+    commandSender = getSender();
+    statementExecutor = getStatementExecutor();
+    statementExecutor.tell(message, self());
+  }
+
+  private ActorRef getStatementExecutor() {
+    return getContext().actorOf(Props.create(StatementExecutor.class, hdfsApi, storage, connectable.getConnection().get(), connectionDelegate)
+      .withDispatcher("akka.actor.result-dispatcher"),
+      "StatementExecutor:" + UUID.randomUUID().toString());
+  }
+
+  private boolean isAsync() {
+    return executionType == HiveJob.Type.ASYNC;
+  }
+
+  private void notifyConnectFailure(Exception ex) {
+    executing = false;
+    isFailure = true;
+    this.failure = new Failure("Cannot connect to hive", ex);
+    if (isAsync()) {
+      updateJobStatus(jobId.get(), Job.JOB_STATE_ERROR);
+
+      if(ex instanceof ConnectionException){
+        ConnectionException connectionException = (ConnectionException) ex;
+        Throwable cause = connectionException.getCause();
+        if(cause instanceof SQLException){
+          SQLException sqlException = (SQLException) cause;
+          if(isLoginError(sqlException))
+            return;
+        }
+      }
+
+    } else {
+      sender().tell(new ExecutionFailed("Cannot connect to hive"), ActorRef.noSender());
+    }
+    // Do not clean up in case of failed authorizations
+    // The failure is bubbled to the user for requesting credentials
+
+    if (!(ex instanceof SQLException) || !((SQLException) ex).getSQLState().equals("AUTHFAIL")) {
+      cleanUpWithTermination();
+    }
+  }
+
+  private boolean isLoginError(SQLException ce) {
+    return ce.getCause().getMessage().toLowerCase().endsWith(SUFFIX);
+  }
+
+  private void keepAlive() {
+    lastActivityTimestamp = System.currentTimeMillis();
+  }
+
+  private void jobExecutionCompleted() {
+    // Set executing to false so that the inactivity checks can finish cleanup
+    // after the timeout
+    LOG.info("Job execution completed for user: {}. Results are ready to be fetched", username);
+    this.executing = false;
+  }
+
+  protected Optional<String> getUsername() {
+    return Optional.fromNullable(username);
+  }
+
+  private void connect(Connect message) {
+    username = message.getUsername();
+    jobId = message.getJobId();
+    executionType = message.getType();
+    // check the connectable
+    if (connectable == null) {
+      connectable = message.getConnectable(authParams);
+    }
+    // make the connectable to Hive
+    try {
+      if (!connectable.isOpen()) {
+        connectable.connect();
+      }
+    } catch (ConnectionException e) {
+      LOG.error("Failed to create a hive connection. {}", e);
+      // set up job failure
+      // notify parent about job failure
+      notifyConnectFailure(e);
+      return;
+    }
+    startTerminateInactivityScheduler();
+  }
+
+  private void updateJobStatus(String jobid, final String status) {
+    new JobSaver(jobid) {
+      @Override
+      protected void update(JobImpl job) {
+        job.setStatus(status);
+        job.setDuration(getUpdatedDuration(job.getDateSubmitted()));
+      }
+    }.save();
+    LOG.info("Stored job status for Job id: {} as '{}'", jobid, status);
+  }
+
+  private void saveGuid(final SaveGuidToDB message) {
+    new JobSaver(message.getJobId()) {
+      @Override
+      protected void update(JobImpl job) {
+        job.setGuid(message.getGuid());
+      }
+    }.save();
+    LOG.info("Stored GUID for Job id: {} as '{}'", message.getJobId(), message.getGuid());
+  }
+
+  private void saveDagInformation(final SaveDagInformation message) {
+    if(message.getDagId() == null &&
+        message.getDagName() == null &&
+        message.getApplicationId() == null) {
+      LOG.error("Cannot save Dag Information for job Id: {} as all the properties are null.", message.getJobId());
+      return;
+    }
+    new JobSaver(message.getJobId()) {
+
+      @Override
+      protected void update(JobImpl job) {
+        if (message.getApplicationId() != null) {
+          job.setApplicationId(message.getApplicationId());
+        }
+        if (message.getDagId() != null) {
+          job.setDagId(message.getDagId());
+        }
+        if(message.getDagName() != null) {
+          job.setDagName(message.getDagName());
+        }
+      }
+    }.save();
+    LOG.info("Stored Dag Information for job. Job id: {}, dagName: {}, dagId: {}, applicationId: {}", message.getJobId(), message.getDagName(), message.getDagId(), message.getApplicationId());
+  }
+
+  private Long getUpdatedDuration(Long dateSubmitted) {
+    return (System.currentTimeMillis() / MILLIS_IN_SECOND) - (dateSubmitted / MILLIS_IN_SECOND);
+  }
+
+
+  private void checkInactivity() {
+    LOG.debug("Inactivity check, executing status: {}", executing);
+    if (executing) {
+      keepAlive();
+      return;
+    }
+    long current = System.currentTimeMillis();
+    if ((current - lastActivityTimestamp) > actorConfiguration.getInactivityTimeout(MAX_INACTIVITY_INTERVAL)) {
+      // Stop all the sub-actors created
+      cleanUp();
+    }
+  }
+
+  private void checkTerminationInactivity() {
+    if (!isAsync()) {
+      // Should not terminate if job is sync. Will terminate after the job is finished.
+      stopTerminateInactivityScheduler();
+      return;
+    }
+
+    LOG.debug("Termination check, executing status: {}", executing);
+    if (executing) {
+      keepAlive();
+      return;
+    }
+
+    long current = System.currentTimeMillis();
+    if ((current - lastActivityTimestamp) > actorConfiguration.getTerminationTimeout(MAX_TERMINATION_INACTIVITY_INTERVAL)) {
+      cleanUpWithTermination();
+    }
+  }
+
+  private void cleanUp() {
+    if (jobId.isPresent()) {
+      LOG.debug("{} :: Cleaning up resources for inactivity for jobId: {}", self().path().name(), jobId.get());
+    } else {
+      LOG.debug("{} :: Cleaning up resources with inactivity for Sync execution.", self().path().name());
+    }
+    this.executing = false;
+    cleanUpStatementAndResultSet();
+    stopInactivityScheduler();
+    parent.tell(new FreeConnector(username, jobId.orNull(), isAsync()), self());
+  }
+
+  private void cleanUpWithTermination() {
+    this.executing = false;
+    LOG.debug("{} :: Cleaning up resources with inactivity for execution.", self().path().name());
+    cleanUpStatementAndResultSet();
+
+    stopInactivityScheduler();
+    stopTerminateInactivityScheduler();
+    parent.tell(new DestroyConnector(username, jobId.orNull(), isAsync()), this.self());
+    self().tell(PoisonPill.getInstance(), ActorRef.noSender());
+  }
+
+
+  private void cleanUpStatementAndResultSet() {
+    connectionDelegate.closeStatement();
+    connectionDelegate.closeResultSet();
+  }
+
+  private void startTerminateInactivityScheduler() {
+    this.terminateActorScheduler = getContext().system().scheduler().schedule(
+      Duration.Zero(), Duration.create(60 * 1000, TimeUnit.MILLISECONDS),
+      this.getSelf(), new TerminateInactivityCheck(), getContext().dispatcher(), null);
+  }
+
+  private void stopTerminateInactivityScheduler() {
+    if (!(terminateActorScheduler == null || terminateActorScheduler.isCancelled())) {
+      terminateActorScheduler.cancel();
+    }
+  }
+
+  private void startInactivityScheduler() {
+    if (inactivityScheduler != null) {
+      inactivityScheduler.cancel();
+    }
+    inactivityScheduler = getContext().system().scheduler().schedule(
+      Duration.Zero(), Duration.create(15 * 1000, TimeUnit.MILLISECONDS),
+      this.self(), new InactivityCheck(), getContext().dispatcher(), null);
+  }
+
+  private void stopInactivityScheduler() {
+    if (!(inactivityScheduler == null || inactivityScheduler.isCancelled())) {
+      inactivityScheduler.cancel();
+    }
+  }
+
+  private void resetToInitialState() {
+    isFailure = false;
+    failure = null;
+    resultSetIterator = null;
+    isCancelCalled = false;
+    statementQueue = new ArrayDeque<>();
+  }
+
+  @Override
+  public void postStop() throws Exception {
+    stopInactivityScheduler();
+    stopTerminateInactivityScheduler();
+
+    if (connectable.isOpen()) {
+      connectable.disconnect();
+    }
+  }
+
+  /**
+   * Saves the job to database.
+   */
+  private abstract class JobSaver {
+    private final String jobId;
+
+    JobSaver(String jobId) {
+      this.jobId = jobId;
+    }
+
+    public void save() {
+      try {
+        JobImpl job = storage.load(JobImpl.class, jobId);
+        update(job);
+        storage.store(JobImpl.class, job);
+      } catch (ItemNotFound itemNotFound) {
+        itemNotFound(jobId);
+      }
+    }
+
+    /**
+     * Override to handle the case when the job is not found in storage.
+     */
+    protected void itemNotFound(String jobId) {
+      // Nothing to do by default
+    }
+
+    protected abstract void update(JobImpl job);
+  }
+}
\ No newline at end of file
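
Stripped of the surrounding state, the inactivity handling in JdbcConnector reduces to the pattern below. This is a sketch only, assuming the pre-Akka-2.5 Java scheduler API that the patch imports; the 15-second tick and the onInactivityCheck name mirror startInactivityScheduler() and checkInactivity() above.

    import akka.actor.Cancellable;
    import org.apache.ambari.view.hive20.actor.message.lifecycle.InactivityCheck;
    import scala.concurrent.duration.Duration;
    import java.util.concurrent.TimeUnit;

    // Inside a HiveActor subclass such as JdbcConnector: tick periodically and clean up
    // once the idle period exceeds the configured timeout. keepAlive() refreshes the timestamp.
    private Cancellable inactivityScheduler;
    private long lastActivityTimestamp = System.currentTimeMillis();

    private void startInactivityScheduler() {
        inactivityScheduler = getContext().system().scheduler().schedule(
            Duration.Zero(), Duration.create(15, TimeUnit.SECONDS),
            self(), new InactivityCheck(), getContext().dispatcher(), null);
    }

    private void onInactivityCheck(long timeoutMillis) {
        if (System.currentTimeMillis() - lastActivityTimestamp > timeoutMillis) {
            inactivityScheduler.cancel();   // stop ticking; the actor then frees its connection
        }
    }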

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/LogAggregator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/LogAggregator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/LogAggregator.java
new file mode 100644
index 0000000..f9c21b4
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/LogAggregator.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor;
+
+import akka.actor.ActorRef;
+import akka.actor.Cancellable;
+import com.google.common.base.Joiner;
+import org.apache.ambari.view.hive20.actor.message.GetMoreLogs;
+import org.apache.ambari.view.hive20.actor.message.HiveMessage;
+import org.apache.ambari.view.hive20.actor.message.LogAggregationFinished;
+import org.apache.ambari.view.hive20.actor.message.StartLogAggregation;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.ambari.view.utils.hdfs.HdfsApiException;
+import org.apache.ambari.view.utils.hdfs.HdfsUtil;
+import org.apache.hive.jdbc.HiveStatement;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import scala.concurrent.duration.Duration;
+
+import java.sql.SQLException;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Reads the logs for an ExecuteJob from the Statement and writes them into HDFS.
+ */
+public class LogAggregator extends HiveActor {
+
+  private final Logger LOG = LoggerFactory.getLogger(getClass());
+
+  public static final int AGGREGATION_INTERVAL = 5 * 1000;
+  private final HdfsApi hdfsApi;
+  private final HiveStatement statement;
+  private final String logFile;
+
+  private Cancellable moreLogsScheduler;
+  private ActorRef parent;
+  private boolean hasStartedFetching = false;
+  private boolean shouldFetchMore = true;
+
+  public LogAggregator(HdfsApi hdfsApi, HiveStatement statement, String logFile) {
+    this.hdfsApi = hdfsApi;
+    this.statement = statement;
+    this.logFile = logFile;
+  }
+
+  @Override
+  public void handleMessage(HiveMessage hiveMessage) {
+    Object message = hiveMessage.getMessage();
+    if (message instanceof StartLogAggregation) {
+      start();
+    }
+
+    if (message instanceof GetMoreLogs) {
+      try {
+        getMoreLogs();
+      } catch (SQLException e) {
+        LOG.error("SQL Error while getting logs. Tried writing to: {}", logFile);
+      } catch (HdfsApiException e) {
+        LOG.warn("HDFS Error while writing logs to {}", logFile);
+
+      }
+    }
+  }
+
+  private void start() {
+    parent = this.getSender();
+    hasStartedFetching = false;
+    shouldFetchMore = true;
+    if (!(moreLogsScheduler == null || moreLogsScheduler.isCancelled())) {
+      moreLogsScheduler.cancel();
+    }
+    this.moreLogsScheduler = getContext().system().scheduler().schedule(
+      Duration.Zero(), Duration.create(AGGREGATION_INTERVAL, TimeUnit.MILLISECONDS),
+      this.getSelf(), new GetMoreLogs(), getContext().dispatcher(), null);
+  }
+
+  private void getMoreLogs() throws SQLException, HdfsApiException {
+    List<String> logs = statement.getQueryLog();
+    if (logs.size() > 0 && shouldFetchMore) {
+      String allLogs = Joiner.on("\n").skipNulls().join(logs);
+      HdfsUtil.putStringToFile(hdfsApi, logFile, allLogs);
+      if(!statement.hasMoreLogs()) {
+        shouldFetchMore = false;
+      }
+    } else {
+      // Cancel the timer only when log fetching has been started
+      if(!shouldFetchMore) {
+        moreLogsScheduler.cancel();
+        parent.tell(new LogAggregationFinished(), ActorRef.noSender());
+      }
+    }
+  }
+
+  @Override
+  public void postStop() throws Exception {
+    if (moreLogsScheduler != null && !moreLogsScheduler.isCancelled()) {
+      moreLogsScheduler.cancel();
+    }
+
+  }
+
+}
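
A hypothetical usage sketch for the aggregator: the ActorSystem, HdfsApi and HiveStatement are assumed to already exist in the caller, the HDFS path is made up, and StartLogAggregation is assumed to have a no-argument constructor.

    import akka.actor.ActorRef;
    import akka.actor.Props;
    import org.apache.ambari.view.hive20.actor.LogAggregator;
    import org.apache.ambari.view.hive20.actor.message.StartLogAggregation;

    // system, hdfsApi and hiveStatement come from the caller; the log path is illustrative.
    ActorRef aggregator = system.actorOf(
        Props.create(LogAggregator.class, hdfsApi, hiveStatement, "/tmp/hive/jobs/42/logs"));

    // The aggregator then polls the statement every AGGREGATION_INTERVAL milliseconds and
    // writes the collected log lines to the HDFS file until hasMoreLogs() turns false.
    aggregator.tell(new StartLogAggregation(), ActorRef.noSender());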

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/MetaDataManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/MetaDataManager.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/MetaDataManager.java
new file mode 100644
index 0000000..d63b3a0
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/MetaDataManager.java
@@ -0,0 +1,134 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor;
+
+import akka.actor.ActorRef;
+import akka.actor.Cancellable;
+import akka.actor.Props;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.actor.message.HiveMessage;
+import org.apache.ambari.view.hive20.actor.message.Ping;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import scala.concurrent.duration.Duration;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Manages the meta information for the Hive Server. Singleton actor which keeps one DatabaseManager actor in memory for
+ * each user and instance name combination.
+ */
+public class MetaDataManager extends HiveActor {
+
+  private final Logger LOG = LoggerFactory.getLogger(getClass());
+
+  /**
+   * Stores the sub DatabaseManager actors, keyed by user.
+   */
+  private final Map<String, ActorRef> databaseManagers = new HashMap<>();
+  private final Map<String, Cancellable> terminationSchedulers = new HashMap<>();
+  private final ViewContext context;
+
+  public MetaDataManager(ViewContext context) {
+    this.context = context;
+  }
+
+  @Override
+  public void handleMessage(HiveMessage hiveMessage) {
+
+    Object message = hiveMessage.getMessage();
+    if (message instanceof Ping) {
+      handlePing((Ping) message);
+    } else if (message instanceof Terminate) {
+      handleTerminate((Terminate) message);
+    } else if (message instanceof DatabaseManager.GetDatabases) {
+      handleGetDatabases((DatabaseManager.GetDatabases) message);
+    }
+  }
+
+  private void handlePing(Ping message) {
+    LOG.info("Ping message received for user: {}, instance: {}", message.getUsername(), message.getInstanceName());
+    ActorRef databaseManager = databaseManagers.get(message.getUsername());
+    if (databaseManager == null) {
+      databaseManager = createDatabaseManager(message.getUsername(), message.getInstanceName());
+      databaseManagers.put(context.getUsername(), databaseManager);
+      databaseManager.tell(new DatabaseManager.Refresh(context.getUsername()), getSelf());
+    } else {
+      cancelTerminationScheduler(message.getUsername());
+    }
+    scheduleTermination(context.getUsername());
+  }
+
+  private void handleTerminate(Terminate message) {
+    ActorRef databaseManager = databaseManagers.remove(message.username);
+    getContext().stop(databaseManager);
+    cancelTerminationScheduler(message.getUsername());
+  }
+
+  private void handleGetDatabases(DatabaseManager.GetDatabases message) {
+    String username = message.getUsername();
+    ActorRef databaseManager = databaseManagers.get(username);
+    if(databaseManager != null) {
+      databaseManager.tell(message, getSender());
+    } else {
+      // No DatabaseManager created yet. Start one with a Ping message
+      // and queue up the GetDatabases call to self
+      getSelf().tell(new Ping(username, context.getInstanceName()), getSender());
+      getSelf().tell(message, getSender());
+    }
+  }
+
+  private void cancelTerminationScheduler(String username) {
+    Cancellable cancellable = terminationSchedulers.remove(username);
+    if (!(cancellable == null || cancellable.isCancelled())) {
+      LOG.info("Cancelling termination scheduler");
+      cancellable.cancel();
+    }
+  }
+
+  private void scheduleTermination(String username) {
+    Cancellable cancellable = context().system().scheduler().scheduleOnce(Duration.create(2, TimeUnit.MINUTES),
+        getSelf(), new Terminate(username), getContext().dispatcher(), getSelf());
+    terminationSchedulers.put(username, cancellable);
+  }
+
+  private ActorRef createDatabaseManager(String username, String instanceName) {
+    LOG.info("Creating database manager for username: {}, instance: {}", username, instanceName);
+    return context().actorOf(DatabaseManager.props(context));
+  }
+
+  public static Props props(ViewContext viewContext) {
+    return Props.create(MetaDataManager.class, viewContext);
+  }
+
+  private class Terminate {
+    public final String username;
+
+    public Terminate(String username) {
+      this.username = username;
+    }
+
+    public String getUsername() {
+      return username;
+    }
+  }
+
+}
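
A usage sketch, again with hypothetical names: the ViewContext and ActorSystem are assumed to come from the hosting view, and the user and instance names are illustrative.

    import akka.actor.ActorRef;
    import org.apache.ambari.view.hive20.actor.MetaDataManager;
    import org.apache.ambari.view.hive20.actor.message.Ping;

    // viewContext and system come from the hosting view.
    ActorRef metaDataManager = system.actorOf(MetaDataManager.props(viewContext));

    // Each Ping lazily creates (or keeps alive) the DatabaseManager for the user and
    // pushes the two-minute termination window forward, as implemented in handlePing().
    metaDataManager.tell(new Ping("admin", "HIVE_VIEW_INSTANCE"), ActorRef.noSender());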

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/MetaDataRetriever.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/MetaDataRetriever.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/MetaDataRetriever.java
new file mode 100644
index 0000000..7323a0a
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/MetaDataRetriever.java
@@ -0,0 +1,165 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor;
+
+import akka.actor.Props;
+import com.google.common.base.Optional;
+import org.apache.ambari.view.hive20.actor.message.HiveMessage;
+import org.apache.ambari.view.hive20.internal.Connectable;
+import org.apache.ambari.view.hive20.internal.ConnectionException;
+import org.apache.ambari.view.hive20.internal.dto.DatabaseInfo;
+import org.apache.ambari.view.hive20.internal.dto.TableInfo;
+import org.apache.hive.jdbc.HiveConnection;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * Retrieves database and table metadata for Hive over a JDBC connection and reports the results back to the sender.
+ */
+public class MetaDataRetriever extends HiveActor {
+
+  private final Logger LOG = LoggerFactory.getLogger(getClass());
+
+  private final Connectable connectable;
+
+  public MetaDataRetriever(Connectable connectable) {
+    this.connectable = connectable;
+  }
+
+  @Override
+  public void handleMessage(HiveMessage hiveMessage) {
+    Object message = hiveMessage.getMessage();
+    if (message instanceof RefreshDB) {
+      handleRefreshDB();
+    }
+  }
+
+  private void handleRefreshDB() {
+    try {
+      refreshDatabaseInfos();
+    } catch (ConnectionException | SQLException e) {
+      LOG.error("Failed to update the complete database information. Exception: {}", e);
+      getSender().tell(new DBRefreshFailed(e), getSelf());
+    }
+  }
+
+  private HiveConnection getHiveConnection() throws ConnectionException {
+    if (!connectable.isOpen()) {
+      connectable.connect();
+    }
+    Optional<HiveConnection> connectionOptional = connectable.getConnection();
+    return connectionOptional.get();
+  }
+
+  private void refreshDatabaseInfos() throws ConnectionException, SQLException {
+    HiveConnection connection = getHiveConnection();
+    Set<DatabaseInfo> infos = new HashSet<>();
+    try (ResultSet schemas = connection.getMetaData().getSchemas()) {
+      while (schemas.next()) {
+        DatabaseInfo info = new DatabaseInfo(schemas.getString(1));
+        infos.add(info);
+      }
+    }
+
+    getSender().tell(new DBRefreshed(infos), getSelf());
+
+    for (DatabaseInfo info : infos) {
+      refreshTablesInfo(info.getName());
+    }
+  }
+
+  private void refreshTablesInfo(String database) throws ConnectionException, SQLException {
+    HiveConnection connection = getHiveConnection();
+    try (ResultSet tables = connection.getMetaData().getTables("", database, null, null)) {
+      while (tables.next()) {
+        TableInfo info = new TableInfo(tables.getString(3), tables.getString(4));
+        getSender().tell(new TableRefreshed(info, database), getSelf());
+      }
+    }
+    getSender().tell(new AllTableRefreshed(database), getSelf());
+  }
+
+  public static  Props props(Connectable connectable) {
+    return Props.create(MetaDataRetriever.class, connectable);
+  }
+
+
+  public static class RefreshDB {
+
+  }
+
+  public static class DBRefreshed {
+    private final Set<DatabaseInfo> databases;
+
+    public DBRefreshed(Set<DatabaseInfo> databases) {
+      this.databases = databases;
+    }
+
+    public Set<DatabaseInfo> getDatabases() {
+      return databases;
+    }
+  }
+
+  public static class DBRefreshFailed {
+    private final Exception exception;
+
+    public DBRefreshFailed(Exception exception) {
+      this.exception = exception;
+    }
+
+    public Exception getException() {
+      return exception;
+    }
+  }
+
+  public static  class TableRefreshed {
+    private final TableInfo table;
+    private final String database;
+
+    public TableRefreshed(TableInfo table, String database) {
+      this.table = table;
+      this.database = database;
+    }
+
+    public TableInfo getTable() {
+      return table;
+    }
+
+    public String getDatabase() {
+      return database;
+    }
+  }
+
+  public static class AllTableRefreshed {
+    private final String database;
+
+    public AllTableRefreshed(String database) {
+      this.database = database;
+    }
+
+    public String getDatabase() {
+      return database;
+    }
+  }
+}
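
Underneath the actor plumbing, MetaDataRetriever relies on plain JDBC DatabaseMetaData; the lookup it performs reduces to roughly the following, assuming an open java.sql.Connection to HiveServer2.

    import java.sql.Connection;
    import java.sql.DatabaseMetaData;
    import java.sql.ResultSet;
    import java.sql.SQLException;

    static void printSchemasAndTables(Connection connection) throws SQLException {
        DatabaseMetaData metaData = connection.getMetaData();
        try (ResultSet schemas = metaData.getSchemas()) {
            while (schemas.next()) {
                String database = schemas.getString(1);               // TABLE_SCHEM
                try (ResultSet tables = metaData.getTables(null, database, null, null)) {
                    while (tables.next()) {
                        // Column 3 is TABLE_NAME and column 4 is TABLE_TYPE, the same
                        // indexes MetaDataRetriever uses to build TableInfo.
                        System.out.println(database + "." + tables.getString(3)
                            + " (" + tables.getString(4) + ")");
                    }
                }
            }
        }
    }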

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/OperationController.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/OperationController.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/OperationController.java
new file mode 100644
index 0000000..f751d8f
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/OperationController.java
@@ -0,0 +1,408 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor;
+
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.actor.Props;
+import com.google.common.base.Optional;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.ConnectionDelegate;
+import org.apache.ambari.view.hive20.actor.message.Connect;
+import org.apache.ambari.view.hive20.actor.message.ExecuteJob;
+import org.apache.ambari.view.hive20.actor.message.FetchError;
+import org.apache.ambari.view.hive20.actor.message.FetchResult;
+import org.apache.ambari.view.hive20.actor.message.HiveJob;
+import org.apache.ambari.view.hive20.actor.message.HiveMessage;
+import org.apache.ambari.view.hive20.actor.message.JobRejected;
+import org.apache.ambari.view.hive20.actor.message.RegisterActor;
+import org.apache.ambari.view.hive20.actor.message.SQLStatementJob;
+import org.apache.ambari.view.hive20.actor.message.job.CancelJob;
+import org.apache.ambari.view.hive20.actor.message.job.FetchFailed;
+import org.apache.ambari.view.hive20.actor.message.job.SaveDagInformation;
+import org.apache.ambari.view.hive20.actor.message.lifecycle.DestroyConnector;
+import org.apache.ambari.view.hive20.actor.message.lifecycle.FreeConnector;
+import org.apache.ambari.view.hive20.internal.ContextSupplier;
+import org.apache.ambari.view.hive20.persistence.Storage;
+import org.apache.ambari.view.hive20.utils.LoggingOutputStream;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.commons.collections4.MapUtils;
+import org.apache.commons.collections4.map.HashedMap;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.HashMap;
+import java.util.LinkedHashSet;
+import java.util.LinkedList;
+import java.util.Map;
+import java.util.Queue;
+import java.util.Set;
+import java.util.UUID;
+
+/**
+ * Router actor to control the operations. It delegates the operations to underlying child actors and
+ * stores the state for them.
+ */
+public class OperationController extends HiveActor {
+
+  private final Logger LOG = LoggerFactory.getLogger(getClass());
+
+  private final ActorSystem system;
+  private final ActorRef deathWatch;
+  private final ContextSupplier<ConnectionDelegate> connectionSupplier;
+  private final ContextSupplier<Storage> storageSupplier;
+  private final ContextSupplier<Optional<HdfsApi>> hdfsApiSupplier;
+
+  /**
+   * Stores the idle async connections per user
+   */
+  private final Map<String, Queue<ActorRef>> asyncAvailableConnections;
+
+  /**
+   * Stores the idle sync connections per user
+   */
+  private final Map<String, Queue<ActorRef>> syncAvailableConnections;
+
+
+  /**
+   * Stores the busy async connections per user and per job.
+   */
+  private final Map<String, Map<String, ActorRef>> asyncBusyConnections;
+
+  /**
+   * Stores the busy sync connections per user, used to execute sync jobs
+   * like fetching databases, tables etc.
+   */
+  private final Map<String, Set<ActorRef>> syncBusyConnections;
+
+
+  private final ViewContext context;
+
+  public OperationController(ActorSystem system,
+                             ActorRef deathWatch,
+                             ViewContext context,
+                             ContextSupplier<ConnectionDelegate> connectionSupplier,
+                             ContextSupplier<Storage> storageSupplier,
+                             ContextSupplier<Optional<HdfsApi>> hdfsApiSupplier) {
+    this.system = system;
+    this.deathWatch = deathWatch;
+    this.context = context;
+    this.connectionSupplier = connectionSupplier;
+    this.storageSupplier = storageSupplier;
+    this.hdfsApiSupplier = hdfsApiSupplier;
+    this.asyncAvailableConnections = new HashMap<>();
+    this.syncAvailableConnections = new HashMap<>();
+    this.asyncBusyConnections = new HashedMap<>();
+    this.syncBusyConnections = new HashMap<>();
+  }
+
+  @Override
+  public void handleMessage(HiveMessage hiveMessage) {
+    Object message = hiveMessage.getMessage();
+
+    if (message instanceof ExecuteJob) {
+      ExecuteJob job = (ExecuteJob) message;
+      if (job.getJob().getType() == HiveJob.Type.ASYNC) {
+        sendJob(job.getConnect(), (SQLStatementJob) job.getJob());
+      } else if (job.getJob().getType() == HiveJob.Type.SYNC) {
+        sendSyncJob(job.getConnect(), job.getJob());
+      }
+    }
+
+    if (message instanceof CancelJob) {
+      cancelJob((CancelJob) message);
+    }
+
+    if (message instanceof FetchResult) {
+      fetchResultActorRef((FetchResult) message);
+    }
+
+    if (message instanceof FetchError) {
+      fetchError((FetchError) message);
+    }
+
+    if (message instanceof FreeConnector) {
+      freeConnector((FreeConnector) message);
+    }
+
+    if (message instanceof DestroyConnector) {
+      destroyConnector((DestroyConnector) message);
+    }
+
+    if (message instanceof SaveDagInformation) {
+      saveDagInformation((SaveDagInformation) message);
+    }
+  }
+
+  private void cancelJob(CancelJob message) {
+    String jobId = message.getJobId();
+    String username = message.getUsername();
+    ActorRef actorRef = asyncBusyConnections.get(username).get(jobId);
+    if (actorRef != null) {
+      actorRef.tell(message, sender());
+    } else {
+      String msg = String.format("Cannot cancel job. Job with id: %s for instance: %s has either not started or has expired.", message.getJobId(), context.getInstanceName());
+      LOG.error(msg);
+      sender().tell(new FetchFailed(msg), self());
+    }
+  }
+
+  private void saveDagInformation(SaveDagInformation message) {
+    ActorRef jdbcConnection = asyncBusyConnections.get(context.getUsername()).get(message.getJobId());
+    if(jdbcConnection != null) {
+      jdbcConnection.tell(message, sender());
+    } else {
+      String msg = String.format("Cannot update Dag Information for job. Job with id: %s for instance: %s has either not started or has expired.", message.getJobId(), context.getInstanceName());
+      LOG.error(msg);
+    }
+  }
+
+  private void fetchError(FetchError message) {
+    String jobId = message.getJobId();
+    String username = message.getUsername();
+    ActorRef actorRef = asyncBusyConnections.get(username).get(jobId);
+    if (actorRef != null) {
+      actorRef.tell(message, sender());
+    } else {
+      String msg = String.format("Cannot fetch error for job. Job with id: %s for instance: %s has either not started or has expired.", message.getJobId(), context.getInstanceName());
+      LOG.error(msg);
+      sender().tell(new FetchFailed(msg), self());
+    }
+  }
+
+  private void fetchResultActorRef(FetchResult message) {
+    String username = message.getUsername();
+    String jobId = message.getJobId();
+    ActorRef actorRef = asyncBusyConnections.get(username).get(jobId);
+    if (actorRef != null) {
+      actorRef.tell(message, sender());
+    } else {
+      String msg = String.format("Cannot fetch result for job. Job with id: %s for instance: %s has either not started or has expired.", message.getJobId(), context.getInstanceName());
+      LOG.error(msg);
+      sender().tell(new FetchFailed(msg), self());
+    }
+  }
+
+  private void sendJob(Connect connect, SQLStatementJob job) {
+    String username = job.getUsername();
+    String jobId = job.getJobId().get();
+    ActorRef subActor = null;
+    // Check if there are available actors to process this job
+    subActor = getActorRefFromAsyncPool(username);
+    if (subActor == null) {
+      Optional<HdfsApi> hdfsApiOptional = hdfsApiSupplier.get(context);
+      if (!hdfsApiOptional.isPresent()) {
+        sender().tell(new JobRejected(username, jobId, "Failed to connect to Hive."), self());
+        return;
+      }
+      HdfsApi hdfsApi = hdfsApiOptional.get();
+
+      subActor = system.actorOf(
+        Props.create(JdbcConnector.class, context, self(),
+          deathWatch, hdfsApi, connectionSupplier.get(context),
+          storageSupplier.get(context)).withDispatcher("akka.actor.jdbc-connector-dispatcher"),
+        UUID.randomUUID().toString() + ":asyncjdbcConnector");
+      deathWatch.tell(new RegisterActor(subActor), self());
+    }
+
+    if (asyncBusyConnections.containsKey(username)) {
+      Map<String, ActorRef> actors = asyncBusyConnections.get(username);
+      if (!actors.containsKey(jobId)) {
+        actors.put(jobId, subActor);
+      } else {
+        // Reject this as with the same jobId one connection is already in progress.
+        sender().tell(new JobRejected(username, jobId, "Existing job in progress with same jobId."), ActorRef.noSender());
+      }
+    } else {
+      Map<String, ActorRef> actors = new HashMap<>();
+      actors.put(jobId, subActor);
+      asyncBusyConnections.put(username, actors);
+    }
+
+    // Set up the connection with the ExecuteJob id so it can be tracked for termination
+    subActor.tell(connect, self());
+    subActor.tell(job, self());
+
+  }
+
+  private ActorRef getActorRefFromSyncPool(String username) {
+    return getActorRefFromPool(syncAvailableConnections, username);
+  }
+
+  private ActorRef getActorRefFromAsyncPool(String username) {
+    return getActorRefFromPool(asyncAvailableConnections, username);
+  }
+
+  private ActorRef getActorRefFromPool(Map<String, Queue<ActorRef>> pool, String username) {
+    ActorRef subActor = null;
+    if (pool.containsKey(username)) {
+      Queue<ActorRef> availableActors = pool.get(username);
+      if (availableActors.size() != 0) {
+        subActor = availableActors.poll();
+      }
+    } else {
+      pool.put(username, new LinkedList<ActorRef>());
+    }
+    return subActor;
+  }
+
+  private void sendSyncJob(Connect connect, HiveJob job) {
+    String username = job.getUsername();
+    ActorRef subActor = null;
+    // Check if there are available actors to process this job
+    subActor = getActorRefFromSyncPool(username);
+
+    if (subActor == null) {
+      Optional<HdfsApi> hdfsApiOptional = hdfsApiSupplier.get(context);
+      if (!hdfsApiOptional.isPresent()) {
+        sender().tell(new JobRejected(username, ExecuteJob.SYNC_JOB_MARKER, "Failed to connect to HDFS."), ActorRef.noSender());
+        return;
+      }
+      HdfsApi hdfsApi = hdfsApiOptional.get();
+
+      subActor = system.actorOf(
+        Props.create(JdbcConnector.class, context, self(),
+          deathWatch, hdfsApi, connectionSupplier.get(context),
+          storageSupplier.get(context)).withDispatcher("akka.actor.jdbc-connector-dispatcher"),
+        UUID.randomUUID().toString() + ":syncjdbcConnector");
+      deathWatch.tell(new RegisterActor(subActor), self());
+    }
+
+    if (syncBusyConnections.containsKey(username)) {
+      Set<ActorRef> actors = syncBusyConnections.get(username);
+      actors.add(subActor);
+    } else {
+      LinkedHashSet<ActorRef> actors = new LinkedHashSet<>();
+      actors.add(subActor);
+      syncBusyConnections.put(username, actors);
+    }
+
+    // Termination requires that the ref is known in case of sync jobs
+    subActor.tell(connect, sender());
+    subActor.tell(job, sender());
+  }
+
+
+  private void destroyConnector(DestroyConnector message) {
+    ActorRef sender = getSender();
+    if (message.isForAsync()) {
+      removeFromAsyncBusyPool(message.getUsername(), message.getJobId());
+      removeFromASyncAvailable(message.getUsername(), sender);
+    } else {
+      removeFromSyncBusyPool(message.getUsername(), sender);
+      removeFromSyncAvailable(message.getUsername(), sender);
+    }
+    logMaps();
+  }
+
+  private void freeConnector(FreeConnector message) {
+    ActorRef sender = getSender();
+    if (message.isForAsync()) {
+      LOG.info("About to free connector for job {} and user {}", message.getJobId(), message.getUsername());
+      Optional<ActorRef> refOptional = removeFromAsyncBusyPool(message.getUsername(), message.getJobId());
+      if (refOptional.isPresent()) {
+        addToAsyncAvailable(message.getUsername(), refOptional.get());
+      }
+      return;
+    }
+
+    // Was a sync job, remove from sync pool
+    LOG.info("About to free sync connector for user {}", message.getUsername());
+    Optional<ActorRef> refOptional = removeFromSyncBusyPool(message.getUsername(), sender);
+    if (refOptional.isPresent()) {
+      addToSyncAvailable(message.getUsername(), refOptional.get());
+    }
+
+
+    logMaps();
+
+  }
+
+  private void logMaps() {
+    LOG.debug("Pool status");
+    LoggingOutputStream out = new LoggingOutputStream(LOG, LoggingOutputStream.LogLevel.DEBUG);
+    MapUtils.debugPrint(new PrintStream(out), "Busy Async connections", asyncBusyConnections);
+    MapUtils.debugPrint(new PrintStream(out), "Available Async connections", asyncAvailableConnections);
+    MapUtils.debugPrint(new PrintStream(out), "Busy Sync connections", syncBusyConnections);
+    MapUtils.debugPrint(new PrintStream(out), "Available Sync connections", syncAvailableConnections);
+    try {
+      out.close();
+    } catch (IOException e) {
+      LOG.warn("Cannot close Logging output stream, this may lead to leaks");
+    }
+  }
+
+  private Optional<ActorRef> removeFromSyncBusyPool(String userName, ActorRef refToFree) {
+    if (syncBusyConnections.containsKey(userName)) {
+      Set<ActorRef> actorRefs = syncBusyConnections.get(userName);
+      actorRefs.remove(refToFree);
+    }
+    return Optional.of(refToFree);
+  }
+
+  private Optional<ActorRef> removeFromAsyncBusyPool(String username, String jobId) {
+    ActorRef ref = null;
+    if (asyncBusyConnections.containsKey(username)) {
+      Map<String, ActorRef> actors = asyncBusyConnections.get(username);
+      if (actors.containsKey(jobId)) {
+        ref = actors.get(jobId);
+        actors.remove(jobId);
+      }
+    }
+    return Optional.fromNullable(ref);
+  }
+
+  private void addToAsyncAvailable(String username, ActorRef actor) {
+    addToAvailable(asyncAvailableConnections, username, actor);
+  }
+
+  private void addToSyncAvailable(String username, ActorRef actor) {
+    addToAvailable(syncAvailableConnections, username, actor);
+  }
+
+  private void addToAvailable(Map<String, Queue<ActorRef>> pool, String username, ActorRef actor) {
+    if (!pool.containsKey(username)) {
+      pool.put(username, new LinkedList<ActorRef>());
+    }
+
+    Queue<ActorRef> availableActors = pool.get(username);
+    availableActors.add(actor);
+  }
+
+  private void removeFromASyncAvailable(String username, ActorRef sender) {
+    removeFromAvailable(asyncAvailableConnections, username, sender);
+  }
+
+  private void removeFromSyncAvailable(String username, ActorRef sender) {
+    removeFromAvailable(syncAvailableConnections, username, sender);
+  }
+
+  private void removeFromAvailable(Map<String, Queue<ActorRef>> pool, String username, ActorRef sender) {
+    if (!pool.containsKey(username)) {
+      return;
+    }
+    Queue<ActorRef> actors = pool.get(username);
+    actors.remove(sender);
+  }
+
+}
+
+
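
The connection reuse in OperationController boils down to a per-user idle queue; a distilled, generic sketch of that checkout/release pattern follows, with class and method names that are illustrative rather than taken from the patch.

    import java.util.HashMap;
    import java.util.LinkedList;
    import java.util.Map;
    import java.util.Queue;

    // Mirrors getActorRefFromPool()/addToAvailable(): one idle queue per user.
    public class PerUserPool<T> {
        private final Map<String, Queue<T>> idle = new HashMap<>();

        // Returns an idle item for the user, or null so the caller can create a fresh one.
        public T checkout(String username) {
            Queue<T> queue = idle.get(username);
            return (queue == null || queue.isEmpty()) ? null : queue.poll();
        }

        // Returns an item to the user's idle queue once its job has finished.
        public void release(String username, T item) {
            idle.computeIfAbsent(username, k -> new LinkedList<>()).add(item);
        }
    }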

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/ResultSetIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/ResultSetIterator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/ResultSetIterator.java
new file mode 100644
index 0000000..4b4a407
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/ResultSetIterator.java
@@ -0,0 +1,165 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor;
+
+import akka.actor.ActorRef;
+import com.google.common.collect.Lists;
+import org.apache.ambari.view.hive20.actor.message.CursorReset;
+import org.apache.ambari.view.hive20.actor.message.HiveMessage;
+import org.apache.ambari.view.hive20.actor.message.ResetCursor;
+import org.apache.ambari.view.hive20.actor.message.job.FetchFailed;
+import org.apache.ambari.view.hive20.actor.message.job.Next;
+import org.apache.ambari.view.hive20.actor.message.job.NoMoreItems;
+import org.apache.ambari.view.hive20.actor.message.job.Result;
+import org.apache.ambari.view.hive20.actor.message.lifecycle.CleanUp;
+import org.apache.ambari.view.hive20.actor.message.lifecycle.KeepAlive;
+import org.apache.ambari.view.hive20.client.ColumnDescription;
+import org.apache.ambari.view.hive20.client.ColumnDescriptionShort;
+import org.apache.ambari.view.hive20.client.Row;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.util.List;
+
+public class ResultSetIterator extends HiveActor {
+  private final Logger LOG = LoggerFactory.getLogger(getClass());
+
+  private static final int DEFAULT_BATCH_SIZE = 100;
+  public static final String NULL = "NULL";
+
+  private final ActorRef parent;
+  private final ResultSet resultSet;
+  private final int batchSize;
+
+  private List<ColumnDescription> columnDescriptions;
+  private int columnCount;
+  private boolean async = false;
+  private boolean metaDataFetched = false;
+
+  public ResultSetIterator(ActorRef parent, ResultSet resultSet, int batchSize, boolean isAsync) {
+    this.parent = parent;
+    this.resultSet = resultSet;
+    this.batchSize = batchSize;
+    this.async = isAsync;
+  }
+
+  public ResultSetIterator(ActorRef parent, ResultSet resultSet) {
+    this(parent, resultSet, DEFAULT_BATCH_SIZE, true);
+  }
+
+  public ResultSetIterator(ActorRef parent, ResultSet resultSet, boolean isAsync) {
+    this(parent, resultSet, DEFAULT_BATCH_SIZE, isAsync);
+  }
+
+  @Override
+  public void handleMessage(HiveMessage hiveMessage) {
+    sendKeepAlive();
+    Object message = hiveMessage.getMessage();
+    if (message instanceof Next) {
+      getNext();
+    }
+    if (message instanceof ResetCursor) {
+      resetResultSet();
+    }
+
+    if (message instanceof KeepAlive) {
+      sendKeepAlive();
+    }
+  }
+
+  private void resetResultSet() {
+    try {
+      resultSet.beforeFirst();
+      sender().tell(new CursorReset(), self());
+    } catch (SQLException e) {
+      LOG.error("Failed to reset the cursor", e);
+      sender().tell(new FetchFailed("Failed to reset the cursor", e), self());
+      cleanUpResources();
+    }
+  }
+
+  private void sendKeepAlive() {
+    LOG.debug("Sending a keep alive to {}", parent);
+    parent.tell(new KeepAlive(), self());
+  }
+
+  private void getNext() {
+    List<Row> rows = Lists.newArrayList();
+    if (!metaDataFetched) {
+      try {
+        initialize();
+      } catch (SQLException ex) {
+        LOG.error("Failed to fetch metadata for the ResultSet", ex);
+        sender().tell(new FetchFailed("Failed to get metadata for ResultSet", ex), self());
+        cleanUpResources();
+        return;
+      }
+    }
+    int index = 0;
+    try {
+      while (index < batchSize && resultSet.next()) {
+        index++;
+        rows.add(getRowFromResultSet(resultSet));
+      }
+
+      if (index == 0) {
+        // We have hit end of resultSet
+        sender().tell(new NoMoreItems(), self());
+        if(!async) {
+          cleanUpResources();
+        }
+      } else {
+        Result result = new Result(rows, columnDescriptions);
+        sender().tell(result, self());
+      }
+
+    } catch (SQLException ex) {
+      LOG.error("Failed to fetch next batch for the Resultset", ex);
+      sender().tell(new FetchFailed("Failed to fetch next batch for the Resultset", ex), self());
+      cleanUpResources();
+    }
+  }
+
+  private void cleanUpResources() {
+    parent.tell(new CleanUp(), self());
+  }
+
+  private Row getRowFromResultSet(ResultSet resultSet) throws SQLException {
+    Object[] values = new Object[columnCount];
+    for (int i = 0; i < columnCount; i++) {
+      values[i] = resultSet.getObject(i + 1);
+    }
+    return new Row(values);
+  }
+
+  private void initialize() throws SQLException {
+    metaDataFetched = true;
+    ResultSetMetaData metaData = resultSet.getMetaData();
+    columnCount = metaData.getColumnCount();
+    columnDescriptions = Lists.newArrayList();
+    for (int i = 1; i <= columnCount; i++) {
+      String columnName = metaData.getColumnName(i);
+      String typeName = metaData.getColumnTypeName(i);
+      ColumnDescription description = new ColumnDescriptionShort(columnName, typeName, i);
+      columnDescriptions.add(description);
+    }
+  }
+}
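
A minimal sketch of how a caller is expected to drive this actor (the same Next / Result / NoMoreItems / FetchFailed protocol that DDLDelegatorImpl uses further down in this patch). The ActorSystem, the iterator ActorRef and the process() helper are assumptions for illustration, not part of the patch; imports and the enclosing class are omitted:

    Inbox inbox = Inbox.create(system);                         // 'system' is an assumed ActorSystem
    while (true) {
      inbox.send(resultSetIterator, new Next());                // ask for the next batch (up to batchSize rows)
      Object reply = inbox.receive(Duration.create(1, TimeUnit.MINUTES));
      if (reply instanceof Result) {
        Result result = (Result) reply;
        process(result.getRows(), result.getColumns());         // application-specific handling (assumed helper)
      } else if (reply instanceof NoMoreItems) {
        break;                                                  // cursor exhausted
      } else if (reply instanceof FetchFailed) {
        FetchFailed failed = (FetchFailed) reply;
        throw new RuntimeException(failed.getMessage(), failed.getError());
      }
    }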


[17/20] ambari git commit: AMBARI-19321 : Hive View 2.0 - Minimal view for Hive which includes new UI changes. Also made changes in poms as required (nitirajrathore)

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/lifecycle/FreeConnector.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/lifecycle/FreeConnector.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/lifecycle/FreeConnector.java
new file mode 100644
index 0000000..a32daa3
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/lifecycle/FreeConnector.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message.lifecycle;
+
+public class FreeConnector {
+
+  private final String username;
+  private final String jobId;
+  private final boolean forAsync;
+
+  public FreeConnector(String username, String jobId, boolean forAsync) {
+    this.username = username;
+    this.jobId = jobId;
+    this.forAsync = forAsync;
+  }
+
+  public String getUsername() {
+    return username;
+  }
+
+  public String getJobId() {
+    return jobId;
+  }
+
+  public boolean isForAsync() {
+    return forAsync;
+  }
+
+  @Override
+  public String toString() {
+    return "FreeConnector{" +
+      "username='" + username + '\'' +
+      ", jobId='" + jobId + '\'' +
+      ", forAsync=" + forAsync +
+      '}';
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/lifecycle/InactivityCheck.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/lifecycle/InactivityCheck.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/lifecycle/InactivityCheck.java
new file mode 100644
index 0000000..bd08b09
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/lifecycle/InactivityCheck.java
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message.lifecycle;
+
+public class InactivityCheck {}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/lifecycle/KeepAlive.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/lifecycle/KeepAlive.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/lifecycle/KeepAlive.java
new file mode 100644
index 0000000..840289a
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/lifecycle/KeepAlive.java
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message.lifecycle;
+
+public class KeepAlive {}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/lifecycle/TerminateInactivityCheck.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/lifecycle/TerminateInactivityCheck.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/lifecycle/TerminateInactivityCheck.java
new file mode 100644
index 0000000..7251a6a
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/lifecycle/TerminateInactivityCheck.java
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message.lifecycle;
+
+public class TerminateInactivityCheck {}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/backgroundjobs/BackgroundJobController.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/backgroundjobs/BackgroundJobController.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/backgroundjobs/BackgroundJobController.java
new file mode 100644
index 0000000..0b21a18
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/backgroundjobs/BackgroundJobController.java
@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.backgroundjobs;
+
+import org.apache.ambari.view.ViewContext;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class BackgroundJobController {
+  private ViewContext context;
+
+  protected BackgroundJobController(ViewContext context) {
+    this.context = context;
+  }
+
+  private static Map<String, BackgroundJobController> viewSingletonObjects = new HashMap<String, BackgroundJobController>();
+  public static BackgroundJobController getInstance(ViewContext context) {
+    if (!viewSingletonObjects.containsKey(context.getInstanceName())) {
+      viewSingletonObjects.put(context.getInstanceName(), new BackgroundJobController(context));
+    }
+    return viewSingletonObjects.get(context.getInstanceName());
+  }
+
+  private Map<String, Thread> jobs = new HashMap<String, Thread>();
+  public void startJob(String key, Runnable runnable) {
+    if (jobs.containsKey(key)) {
+      interrupt(key);
+      try {
+        jobs.get(key).join();
+      } catch (InterruptedException ignored) {
+      }
+    }
+    Thread t = new Thread(runnable);
+    jobs.put(key, t);
+    t.start();
+  }
+
+  public Thread.State state(String key) {
+    if (!jobs.containsKey(key)) {
+      return Thread.State.TERMINATED;
+    }
+
+    Thread.State state = jobs.get(key).getState();
+
+    if (state == Thread.State.TERMINATED) {
+      jobs.remove(key);
+    }
+
+    return state;
+  }
+
+  public boolean interrupt(String key) {
+    if (!jobs.containsKey(key)) {
+      return false;
+    }
+
+    jobs.get(key).interrupt();
+    return true;
+  }
+
+  public boolean isInterrupted(String key) {
+    if (state(key) == Thread.State.TERMINATED) {
+      return true;
+    }
+
+    return jobs.get(key).isInterrupted();
+  }
+}
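
A hypothetical usage sketch of the controller above; the job key, the Runnable body and the viewContext variable are made up for illustration, and imports and the enclosing class are omitted:

    BackgroundJobController jobs = BackgroundJobController.getInstance(viewContext); // one controller per view instance
    jobs.startJob("refresh-table-cache", new Runnable() {
      @Override
      public void run() {
        // long-running work; should poll Thread.currentThread().isInterrupted() to honour interrupt()
      }
    });
    if (jobs.state("refresh-table-cache") != Thread.State.TERMINATED) {
      jobs.interrupt("refresh-table-cache");                    // request cancellation of the running job
    }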

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/AsyncJobRunner.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/AsyncJobRunner.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/AsyncJobRunner.java
new file mode 100644
index 0000000..da746a6
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/AsyncJobRunner.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.client;
+
+import com.google.common.base.Optional;
+import org.apache.ambari.view.hive20.actor.message.SQLStatementJob;
+import org.apache.ambari.view.hive20.actor.message.job.Failure;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
+
+public interface AsyncJobRunner {
+
+  void submitJob(ConnectionConfig connectionConfig, SQLStatementJob asyncJob, Job job);
+
+  void cancelJob(String jobId, String username);
+
+  Optional<NonPersistentCursor> getCursor(String jobId, String username);
+
+  Optional<NonPersistentCursor> resetAndGetCursor(String jobId, String username);
+
+  Optional<Failure> getError(String jobId, String username);
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/AsyncJobRunnerImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/AsyncJobRunnerImpl.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/AsyncJobRunnerImpl.java
new file mode 100644
index 0000000..7013f8a
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/AsyncJobRunnerImpl.java
@@ -0,0 +1,143 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.client;
+
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.actor.Inbox;
+import com.google.common.base.Optional;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.actor.message.Connect;
+import org.apache.ambari.view.hive20.actor.message.CursorReset;
+import org.apache.ambari.view.hive20.actor.message.ExecuteJob;
+import org.apache.ambari.view.hive20.actor.message.FetchError;
+import org.apache.ambari.view.hive20.actor.message.FetchResult;
+import org.apache.ambari.view.hive20.actor.message.ResetCursor;
+import org.apache.ambari.view.hive20.actor.message.ResultNotReady;
+import org.apache.ambari.view.hive20.actor.message.SQLStatementJob;
+import org.apache.ambari.view.hive20.actor.message.job.CancelJob;
+import org.apache.ambari.view.hive20.actor.message.job.Failure;
+import org.apache.ambari.view.hive20.actor.message.job.FetchFailed;
+import org.apache.ambari.view.hive20.internal.ConnectionException;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
+import org.apache.ambari.view.hive20.utils.ResultFetchFormattedException;
+import org.apache.ambari.view.hive20.utils.ResultNotReadyFormattedException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import scala.concurrent.duration.Duration;
+
+import java.sql.SQLException;
+import java.util.concurrent.TimeUnit;
+
+public class AsyncJobRunnerImpl implements AsyncJobRunner {
+
+  private final Logger LOG = LoggerFactory.getLogger(getClass());
+
+  private final ActorRef controller;
+  private final ActorSystem system;
+  private final ViewContext context;
+
+  public AsyncJobRunnerImpl(ViewContext context, ActorRef controller, ActorSystem system) {
+    this.context = context;
+    this.controller = controller;
+    this.system = system;
+  }
+
+
+  @Override
+  public void submitJob(ConnectionConfig config, SQLStatementJob job, Job jobp) {
+    Connect connect = config.createConnectMessage(jobp.getId());
+    ExecuteJob executeJob = new ExecuteJob(connect, job);
+    controller.tell(executeJob, ActorRef.noSender());
+  }
+
+  @Override
+  public void cancelJob(String jobId, String username) {
+    controller.tell(new CancelJob(jobId, username), ActorRef.noSender());
+  }
+
+  @Override
+  public Optional<NonPersistentCursor> getCursor(String jobId, String username) {
+    Inbox inbox = Inbox.create(system);
+    inbox.send(controller, new FetchResult(jobId, username));
+    Object receive = inbox.receive(Duration.create(1, TimeUnit.MINUTES));
+    if (receive instanceof ResultNotReady) {
+      String errorString = "Result not ready for job: " + jobId + ", username: " + username + ". Try again after some time.";
+      LOG.info(errorString);
+      throw new ResultNotReadyFormattedException(errorString, new Exception(errorString));
+    } else if (receive instanceof Failure) {
+      Failure failure = (Failure) receive;
+      throw new ResultFetchFormattedException(failure.getMessage(), failure.getError());
+    } else {
+      Optional<ActorRef> iterator = (Optional<ActorRef>) receive;
+      if(iterator.isPresent()) {
+        return Optional.of(new NonPersistentCursor(context, system, iterator.get()));
+      } else {
+        return Optional.absent();
+      }
+    }
+  }
+
+  @Override
+  public Optional<NonPersistentCursor> resetAndGetCursor(String jobId, String username) {
+    Inbox inbox = Inbox.create(system);
+    inbox.send(controller, new FetchResult(jobId, username));
+    Object receive = inbox.receive(Duration.create(1, TimeUnit.MINUTES));
+    if (receive instanceof ResultNotReady) {
+      String errorString = "Result not ready for job: " + jobId + ", username: " + username + ". Try again after some time.";
+      LOG.info(errorString);
+      throw new ResultNotReadyFormattedException(errorString, new Exception(errorString));
+    } else if (receive instanceof Failure) {
+      Failure failure = (Failure) receive;
+      throw new ResultFetchFormattedException(failure.getMessage(), failure.getError());
+    } else {
+      Optional<ActorRef> iterator = (Optional<ActorRef>) receive;
+      if(iterator.isPresent()) {
+        inbox.send(iterator.get(), new ResetCursor());
+        Object resetResult = inbox.receive(Duration.create(1, TimeUnit.MINUTES));
+        if (resetResult instanceof CursorReset) {
+          return Optional.of(new NonPersistentCursor(context, system, iterator.get()));
+        } else {
+          return Optional.absent();
+        }
+      } else {
+        return Optional.absent();
+      }
+    }
+  }
+
+  @Override
+  public Optional<Failure> getError(String jobId, String username) {
+    Inbox inbox = Inbox.create(system);
+    inbox.send(controller, new FetchError(jobId, username));
+    Object receive = inbox.receive(Duration.create(1, TimeUnit.MINUTES));
+    if (receive instanceof FetchFailed) {
+      FetchFailed fetchFailed = (FetchFailed) receive;
+      return Optional.of(new Failure(fetchFailed.getMessage(), getExceptionForRetry()));
+    }
+    Optional<Failure> result = (Optional<Failure>) receive;
+    return result;
+  }
+
+  private ConnectionException getExceptionForRetry() {
+    return new ConnectionException(new SQLException("Cannot connect"), "Connection attempt failed. Please retry.");
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/ColumnDescription.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/ColumnDescription.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/ColumnDescription.java
new file mode 100644
index 0000000..d0e4b0c
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/ColumnDescription.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.client;
+
+public interface ColumnDescription {
+  enum DataTypes {
+    TINYINT, //
+    SMALLINT, //
+    INT, //
+    BIGINT, //
+    BOOLEAN, //
+    FLOAT, //
+    DOUBLE, //
+    STRING, //
+    BINARY, // -- (Note: Available in Hive 0.8.0 and later)
+    TIMESTAMP, // -- (Note: Available in Hive 0.8.0 and later)
+    DECIMAL, // -- (Note: Available in Hive 0.11.0 and later)
+    // DECIMAL(precision, scale) -- (Note: Available in Hive 0.13.0 and later) Not included.
+    DATE, // -- (Note: Available in Hive 0.12.0 and later)
+    VARCHAR, // -- (Note: Available in Hive 0.12.0 and later)
+    CHAR, // -- (Note: Available in Hive 0.13.0 and later)
+  }
+
+  String getName();
+
+  String getType();
+
+  int getPosition();
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/ColumnDescriptionExtended.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/ColumnDescriptionExtended.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/ColumnDescriptionExtended.java
new file mode 100644
index 0000000..d33d3e0
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/ColumnDescriptionExtended.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.client;
+
+public class ColumnDescriptionExtended implements ColumnDescription {
+  private String name;
+  private String type;
+  private int position;
+  private String comment;
+  private boolean partitioned;
+  private boolean sortedBy;
+  private boolean clusteredBy;
+
+  public ColumnDescriptionExtended(String name, String type, String comment, boolean partitioned,
+                                   boolean sortedBy, boolean clusteredBy, int position) {
+    this.name = name;
+    this.type = type;
+    this.comment = comment;
+    this.partitioned = partitioned;
+    this.sortedBy = sortedBy;
+    this.clusteredBy = clusteredBy;
+    this.position = position;
+  }
+
+  public ColumnDescription createShortColumnDescription() {
+    return new ColumnDescriptionShort(getName(), getType(), getPosition());
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public String getType() {
+    return type;
+  }
+
+  public int getPosition() {
+    return position;
+  }
+
+  public String getComment() {
+    return comment;
+  }
+
+  public boolean isPartitioned() {
+    return partitioned;
+  }
+
+  public boolean isSortedBy() {
+    return sortedBy;
+  }
+
+  public boolean isClusteredBy() {
+    return clusteredBy;
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/ColumnDescriptionShort.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/ColumnDescriptionShort.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/ColumnDescriptionShort.java
new file mode 100644
index 0000000..9374fd1
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/ColumnDescriptionShort.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.client;
+
+import java.util.ArrayList;
+
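+/**
+ * Note: extends ArrayList so that a column can also be consumed as a compact
+ * [name, type, position] list (see the add() calls in the constructor) while still
+ * exposing the named getters.
+ */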
+public class ColumnDescriptionShort extends ArrayList<Object> implements ColumnDescription {
+  private static final int INITIAL_CAPACITY = 3;
+  private String name;
+  private String type;
+  private int position;
+
+  public ColumnDescriptionShort(String name, String type, int position) {
+    super(INITIAL_CAPACITY);
+    add(name);
+    add(type);
+    add(position);
+    this.name = name;
+    this.type = type;
+    this.position = position;
+  }
+
+  @Override
+  public String getName() {
+    return name;
+  }
+
+  @Override
+  public String getType() {
+    return type;
+  }
+
+  @Override
+  public int getPosition() {
+    return position;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/ConnectionConfig.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/ConnectionConfig.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/ConnectionConfig.java
new file mode 100644
index 0000000..14bea5a
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/ConnectionConfig.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.client;
+
+import org.apache.ambari.view.hive20.actor.message.Connect;
+
+public class ConnectionConfig {
+  private final String username;
+  private String password;
+  private final String jdbcUrl;
+
+  public ConnectionConfig(String username, String password, String jdbcUrl) {
+    this.username = username;
+    this.password = password;
+    this.jdbcUrl = jdbcUrl;
+  }
+
+  public void setPassword(String password) {
+    this.password = password;
+  }
+
+  public String getUsername() {
+    return username;
+  }
+
+  public String getPassword() {
+    return password;
+  }
+
+  public String getJdbcUrl() {
+    return jdbcUrl;
+  }
+
+  public Connect createConnectMessage() {
+    return new Connect(username, password, jdbcUrl);
+  }
+
+  public Connect createConnectMessage(String jobId) {
+    return new Connect(jobId, username, password, jdbcUrl);
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/Cursor.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/Cursor.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/Cursor.java
new file mode 100644
index 0000000..fcf89d4
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/Cursor.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.client;
+
+import java.util.Iterator;
+import java.util.List;
+
+public interface Cursor<T, R> extends Iterator<T>, Iterable<T>{
+  boolean isResettable();
+  void reset();
+  int getOffset();
+  List<R> getDescriptions();
+  void keepAlive();
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegator.java
new file mode 100644
index 0000000..ffa9e132
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegator.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.client;
+
+import java.util.List;
+
+public interface DDLDelegator {
+
+  List<String> getDbList(ConnectionConfig config, String like);
+
+  List<String> getTableList(ConnectionConfig config, String database, String like);
+
+  List<Row> getTableDescriptionFormatted(ConnectionConfig config, String database, String table);
+
+  List<Row> getTableCreateStatement(ConnectionConfig config, String database, String table);
+
+  List<ColumnDescription> getTableDescription(ConnectionConfig config, String database, String table, String like, boolean extended);
+
+  Cursor<Row, ColumnDescription> getDbListCursor(ConnectionConfig config, String like);
+
+  Cursor<Row, ColumnDescription> getTableListCursor(ConnectionConfig config, String database, String like);
+
+  Cursor<Row, ColumnDescription> getTableDescriptionCursor(ConnectionConfig config, String database, String table, String like, boolean extended);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegatorImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegatorImpl.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegatorImpl.java
new file mode 100644
index 0000000..76c7c03
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/DDLDelegatorImpl.java
@@ -0,0 +1,267 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.client;
+
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.actor.Inbox;
+import com.google.common.base.Function;
+import com.google.common.base.Joiner;
+import com.google.common.base.Optional;
+import com.google.common.collect.FluentIterable;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.actor.message.Connect;
+import org.apache.ambari.view.hive20.actor.message.ExecuteJob;
+import org.apache.ambari.view.hive20.actor.message.GetColumnMetadataJob;
+import org.apache.ambari.view.hive20.actor.message.HiveJob;
+import org.apache.ambari.view.hive20.actor.message.SQLStatementJob;
+import org.apache.ambari.view.hive20.actor.message.job.ExecutionFailed;
+import org.apache.ambari.view.hive20.actor.message.job.FetchFailed;
+import org.apache.ambari.view.hive20.actor.message.job.Next;
+import org.apache.ambari.view.hive20.actor.message.job.NoMoreItems;
+import org.apache.ambari.view.hive20.actor.message.job.NoResult;
+import org.apache.ambari.view.hive20.actor.message.job.Result;
+import org.apache.ambari.view.hive20.actor.message.job.ResultSetHolder;
+import org.apache.ambari.view.hive20.utils.HiveActorConfiguration;
+import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import scala.concurrent.duration.Duration;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+public class DDLDelegatorImpl implements DDLDelegator {
+
+  public static final String NO_VALUE_MARKER = "NO_VALUE";
+  private final Logger LOG = LoggerFactory.getLogger(getClass());
+
+  private final ActorRef controller;
+  private final ActorSystem system;
+
+  private final ViewContext context;
+  private final HiveActorConfiguration actorConfiguration;
+
+  public DDLDelegatorImpl(ViewContext context, ActorSystem system, ActorRef controller) {
+    this.context = context;
+    this.system = system;
+    this.controller = controller;
+    actorConfiguration = new HiveActorConfiguration(context);
+  }
+
+  @Override
+  public List<String> getDbList(ConnectionConfig config, String like) {
+    Optional<Result> rowsFromDB = getRowsFromDB(config, getDatabaseListStatements(like));
+    return rowsFromDB.isPresent() ? getFirstColumnValues(rowsFromDB.get().getRows()) : Lists.<String>newArrayList();
+  }
+
+  @Override
+  public List<String> getTableList(ConnectionConfig config, String database, String like) {
+    Optional<Result> rowsFromDB = getRowsFromDB(config, getTableListStatements(database, like));
+    return rowsFromDB.isPresent() ? getFirstColumnValues(rowsFromDB.get().getRows()) : Lists.<String>newArrayList();
+  }
+
+  @Override
+  public List<Row> getTableDescriptionFormatted(ConnectionConfig config, String database, String table) {
+    Optional<Result> rowsFromDB = getRowsFromDB(config, getTableDescriptionStatements(database, table));
+    return rowsFromDB.isPresent() ? rowsFromDB.get().getRows() : null;
+  }
+
+  @Override
+  public List<Row> getTableCreateStatement(ConnectionConfig config, String database, String table) {
+    Optional<Result> rowsFromDB = getRowsFromDB(config, getShowCreateTableStatements(database, table));
+    return rowsFromDB.isPresent() ? rowsFromDB.get().getRows() : null;
+  }
+
+  private String[] getShowCreateTableStatements(String database, String table) {
+    return new String[]{
+        String.format("use %s",database),
+        String.format("show create table %s", table)
+    };
+  }
+
+  private String[] getTableDescriptionStatements(String database, String table) {
+    return new String[]{
+      String.format("use %s",database),
+      String.format("describe formatted %s", table)
+    };
+  }
+
+  @Override
+  public List<ColumnDescription> getTableDescription(ConnectionConfig config, String database, String table, String like, boolean extended) {
+ Optional<Result> resultOptional = getTableDescription(config, database, table, like);
+    List<ColumnDescription> descriptions = new ArrayList<>();
+    if(resultOptional.isPresent()) {
+      for (Row row : resultOptional.get().getRows()) {
+        Object[] values = row.getRow();
+        String name = (String) values[3];
+        String type = (String) values[5];
+        int position = (Integer) values[16];
+        descriptions.add(new ColumnDescriptionShort(name, type, position));
+      }
+    }
+    return descriptions;
+  }
+
+  @Override
+  public Cursor<Row, ColumnDescription> getDbListCursor(ConnectionConfig config, String like) {
+    Optional<Result> rowsFromDB = getRowsFromDB(config, getDatabaseListStatements(like));
+    if (rowsFromDB.isPresent()) {
+      Result result = rowsFromDB.get();
+      return new PersistentCursor<>(result.getRows(), result.getColumns());
+    } else {
+      return new PersistentCursor<>(Lists.<Row>newArrayList(), Lists.<ColumnDescription>newArrayList());
+    }
+  }
+
+  @Override
+  public Cursor<Row, ColumnDescription> getTableListCursor(ConnectionConfig config, String database, String like) {
+    Optional<Result> rowsFromDB = getRowsFromDB(config, getTableListStatements(database, like));
+    if (rowsFromDB.isPresent()) {
+      Result result = rowsFromDB.get();
+      return new PersistentCursor<>(result.getRows(), result.getColumns());
+    } else {
+      return new PersistentCursor<>(Lists.<Row>newArrayList(), Lists.<ColumnDescription>newArrayList());
+    }
+  }
+
+  @Override
+  public Cursor<Row, ColumnDescription> getTableDescriptionCursor(ConnectionConfig config, String database, String table, String like, boolean extended) {
+    Optional<Result> tableDescriptionOptional = getTableDescription(config, database, table, like);
+    if(tableDescriptionOptional.isPresent()) {
+      Result result = tableDescriptionOptional.get();
+      return new PersistentCursor<>(result.getRows(), result.getColumns());
+    } else {
+      return new PersistentCursor<>(Lists.<Row>newArrayList(), Lists.<ColumnDescription>newArrayList());
+    }
+  }
+
+  private String[] getDatabaseListStatements(String like) {
+    return new String[]{
+      String.format("show databases like '%s'", like)
+    };
+  }
+
+  private String[] getTableListStatements(String database, String like) {
+    return new String[]{
+      String.format("use %s", database),
+      String.format("show tables like '%s'", like)
+    };
+  }
+
+  private Optional<Result> getRowsFromDB(ConnectionConfig config, String[] statements) {
+    Connect connect = config.createConnectMessage();
+    HiveJob job = new SQLStatementJob(HiveJob.Type.SYNC, statements, config.getUsername());
+    ExecuteJob execute = new ExecuteJob(connect, job);
+
+    LOG.info("Executing query: {}, for user: {}", getJoinedStatements(statements), job.getUsername());
+
+    return getResultFromDB(execute);
+  }
+
+  private Optional<Result> getTableDescription(ConnectionConfig config, String databasePattern, String tablePattern, String columnPattern) {
+    Connect connect = config.createConnectMessage();
+    HiveJob job = new GetColumnMetadataJob(config.getUsername(), databasePattern, tablePattern, columnPattern);
+    ExecuteJob execute = new ExecuteJob(connect, job);
+
+    LOG.info("Executing query to fetch the column description for dbPattern: {}, tablePattern: {}, columnPattern: {}, for user: {}",
+      databasePattern, tablePattern, columnPattern, job.getUsername());
+    return getResultFromDB(execute);
+  }
+
+  private Optional<Result> getResultFromDB(ExecuteJob job) {
+    List<ColumnDescription> descriptions = null;
+    List<Row> rows = Lists.newArrayList();
+    Inbox inbox = Inbox.create(system);
+    inbox.send(controller, job);
+    Object submitResult;
+    try {
+      submitResult = inbox.receive(Duration.create(actorConfiguration.getSyncQueryTimeout(60 * 1000), TimeUnit.MILLISECONDS));
+    } catch (Throwable ex) {
+      String errorMessage = "Query timed out to fetch table description for user: " + job.getConnect().getUsername();
+      LOG.error(errorMessage, ex);
+      throw new ServiceFormattedException(errorMessage, ex);
+    }
+
+    if (submitResult instanceof NoResult) {
+      LOG.info("Query returned with no result.");
+      return Optional.absent();
+    }
+
+    if (submitResult instanceof ExecutionFailed) {
+      ExecutionFailed error = (ExecutionFailed) submitResult;
+      LOG.error("Failed to get the table description");
+      throw new ServiceFormattedException(error.getMessage(), error.getError());
+
+    } else if (submitResult instanceof ResultSetHolder) {
+      ResultSetHolder holder = (ResultSetHolder) submitResult;
+      ActorRef iterator = holder.getIterator();
+      while (true) {
+        inbox.send(iterator, new Next());
+        Object receive;
+        try {
+          receive = inbox.receive(Duration.create(actorConfiguration.getResultFetchTimeout(60 * 1000), TimeUnit.MILLISECONDS));
+        } catch (Throwable ex) {
+          String errorMessage = "Query timed out to fetch results for user: " + job.getConnect().getUsername();
+          LOG.error(errorMessage, ex);
+          throw new ServiceFormattedException(errorMessage, ex);
+        }
+
+        if (receive instanceof Result) {
+          Result result = (Result) receive;
+          if (descriptions == null) {
+            descriptions = result.getColumns();
+          }
+          rows.addAll(result.getRows());
+        }
+
+        if (receive instanceof NoMoreItems) {
+          break;
+        }
+
+        if (receive instanceof FetchFailed) {
+          FetchFailed error = (FetchFailed) receive;
+          LOG.error("Failed to fetch results ");
+          throw new ServiceFormattedException(error.getMessage(), error.getError());
+        }
+      }
+
+    }
+    return Optional.of(new Result(rows, descriptions));
+  }
+
+  private String getJoinedStatements(String[] statements) {
+    return Joiner.on("; ").skipNulls().join(statements);
+  }
+
+  private ImmutableList<String> getFirstColumnValues(List<Row> rows) {
+    return FluentIterable.from(rows)
+      .transform(new Function<Row, String>() {
+        @Override
+        public String apply(Row input) {
+          Object[] values = input.getRow();
+          return values.length > 0 ? (String) values[0] : NO_VALUE_MARKER;
+        }
+      }).toList();
+  }
+
+}
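
A hypothetical usage sketch of the delegator above; the ViewContext, ActorSystem, controller ActorRef and JDBC credentials are assumed, '*' is the Hive wildcard for SHOW ... LIKE, and imports and the enclosing class are omitted:

    DDLDelegator delegator = new DDLDelegatorImpl(viewContext, actorSystem, controllerRef);
    ConnectionConfig config = new ConnectionConfig(username, password, jdbcUrl);
    List<String> databases = delegator.getDbList(config, "*");            // all databases visible to the user
    for (String database : databases) {
      List<String> tables = delegator.getTableList(config, database, "*");
      // inspect a table: delegator.getTableDescriptionFormatted(config, database, tables.get(0))
    }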

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/EmptyCursor.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/EmptyCursor.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/EmptyCursor.java
new file mode 100644
index 0000000..17af66e
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/EmptyCursor.java
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.client;
+
+import com.beust.jcommander.internal.Lists;
+import org.apache.commons.lang.NotImplementedException;
+
+import java.util.Iterator;
+import java.util.List;
+
+public class EmptyCursor implements Cursor<Row, ColumnDescription> {
+
+    private List<Row> rows = Lists.newArrayList();
+    private List<ColumnDescription> desc = Lists.newArrayList();
+
+
+    @Override
+    public boolean isResettable() {
+        return false;
+    }
+
+    @Override
+    public void reset() {
+
+    }
+
+    @Override
+    public int getOffset() {
+        return 0;
+    }
+
+    @Override
+    public List<ColumnDescription> getDescriptions() {
+        return desc;
+    }
+
+  @Override
+  public void keepAlive() {
+    // Do Nothing
+  }
+
+  /**
+     * Returns an iterator over a set of elements of type T.
+     *
+     * @return an Iterator.
+     */
+    @Override
+    public Iterator<Row> iterator() {
+        return rows.iterator();
+    }
+
+    /**
+     * Returns {@code true} if the iteration has more elements.
+     * (In other words, returns {@code true} if {@link #next} would
+     * return an element rather than throwing an exception.)
+     *
+     * @return {@code true} if the iteration has more elements
+     */
+    @Override
+    public boolean hasNext() {
+        return false;
+    }
+
+    /**
+     * Returns the next element in the iteration.
+     *
+     * @return never returns normally, as this cursor is always empty
+     * @throws NotImplementedException always, because an empty cursor has no elements
+     */
+    @Override
+    public Row next() {
+        throw new NotImplementedException();
+    }
+
+    /**
+     * Removes from the underlying collection the last element returned
+     * by this iterator (optional operation).  This method can be called
+     * only once per call to {@link #next}.  The behavior of an iterator
+     * is unspecified if the underlying collection is modified while the
+     * iteration is in progress in any way other than by calling this
+     * method.
+     *
+     * @throws UnsupportedOperationException if the {@code remove}
+     *                                       operation is not supported by this iterator
+     * @throws IllegalStateException         if the {@code next} method has not
+     *                                       yet been called, or the {@code remove} method has already
+     *                                       been called after the last call to the {@code next}
+     *                                       method
+     */
+    @Override
+    public void remove() {
+        throw new NotImplementedException();
+    }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/HiveAuthCredentials.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/HiveAuthCredentials.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/HiveAuthCredentials.java
new file mode 100644
index 0000000..2897f22
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/HiveAuthCredentials.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.client;
+
+public class HiveAuthCredentials {
+  private String password;
+
+  public String getPassword() {
+    return password;
+  }
+
+  public void setPassword(String password) {
+    this.password = password;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/HiveAuthRequiredException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/HiveAuthRequiredException.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/HiveAuthRequiredException.java
new file mode 100644
index 0000000..f34479e
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/HiveAuthRequiredException.java
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.client;
+
+import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
+
+public class HiveAuthRequiredException extends ServiceFormattedException {
+  public HiveAuthRequiredException() {
+    super("Hive Password Required", null, 401);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/HiveClientAuthRequiredException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/HiveClientAuthRequiredException.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/HiveClientAuthRequiredException.java
new file mode 100644
index 0000000..41c0d64
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/HiveClientAuthRequiredException.java
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.client;
+
+public class HiveClientAuthRequiredException extends Exception {
+  public HiveClientAuthRequiredException(String comment, Exception ex) {
+    super(comment + ((ex == null)?"":(": " + ex.toString())), ex);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/HiveClientException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/HiveClientException.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/HiveClientException.java
new file mode 100644
index 0000000..4caf50e
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/HiveClientException.java
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.client;
+
+public class HiveClientException extends Exception {
+  public HiveClientException(String comment, Exception ex) {
+    super(comment + ((ex == null)?"":(": " + ex.toString())), ex);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/HiveClientRuntimeException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/HiveClientRuntimeException.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/HiveClientRuntimeException.java
new file mode 100644
index 0000000..c5fcf69
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/HiveClientRuntimeException.java
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.client;
+
+public class HiveClientRuntimeException extends RuntimeException {
+  public HiveClientRuntimeException(String comment, Exception ex) {
+    super(comment + ((ex == null)?"":(": " + ex.toString())), ex);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/NonPersistentCursor.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/NonPersistentCursor.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/NonPersistentCursor.java
new file mode 100644
index 0000000..40468f7
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/NonPersistentCursor.java
@@ -0,0 +1,153 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.client;
+
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.actor.Inbox;
+import com.google.common.collect.Lists;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.actor.message.lifecycle.KeepAlive;
+import org.apache.ambari.view.hive20.utils.HiveActorConfiguration;
+import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
+import org.apache.ambari.view.hive20.actor.message.job.FetchFailed;
+import org.apache.ambari.view.hive20.actor.message.job.Next;
+import org.apache.ambari.view.hive20.actor.message.job.NoMoreItems;
+import org.apache.ambari.view.hive20.actor.message.job.Result;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import scala.concurrent.duration.Duration;
+
+import java.util.Iterator;
+import java.util.List;
+import java.util.Queue;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Wrapper over the result-set iterator actor. Blocks to fetch more Rows and the ColumnDescriptions whenever the
+ * local buffer has no more rows to return.
+ */
+public class NonPersistentCursor implements Cursor<Row, ColumnDescription> {
+  private final Logger LOG = LoggerFactory.getLogger(getClass());
+  private static final long DEFAULT_WAIT_TIMEOUT = 60 * 1000L;
+
+  private final ActorSystem system;
+  private final ActorRef actorRef;
+  private final ViewContext context;
+  private final HiveActorConfiguration actorConfiguration;
+  private final Queue<Row> rows = Lists.newLinkedList();
+  private final List<ColumnDescription> descriptions = Lists.newLinkedList();
+  private int offSet = 0;
+  private boolean endReached = false;
+
+
+  public NonPersistentCursor(ViewContext context, ActorSystem system, ActorRef actorRef) {
+    this.context = context;
+    this.system = system;
+    this.actorRef = actorRef;
+    actorConfiguration = new HiveActorConfiguration(context);
+  }
+
+  @Override
+  public boolean isResettable() {
+    return false;
+  }
+
+  @Override
+  public void reset() {
+    // Do nothing
+  }
+
+  @Override
+  public int getOffset() {
+    return offSet;
+  }
+
+  @Override
+  public List<ColumnDescription> getDescriptions() {
+    fetchIfNeeded();
+    return descriptions;
+  }
+
+  @Override
+  public void keepAlive() {
+    Inbox inbox = Inbox.create(system);
+    inbox.send(actorRef, new KeepAlive());
+  }
+
+  @Override
+  public Iterator<Row> iterator() {
+    return this;
+  }
+
+  @Override
+  public boolean hasNext() {
+    fetchIfNeeded();
+    return !endReached;
+  }
+
+  @Override
+  public Row next() {
+    fetchIfNeeded();
+    offSet++;
+    return rows.poll();
+  }
+
+  @Override
+  public void remove() {
+    throw new RuntimeException("Read only cursor. Method not supported");
+  }
+
+  private void fetchIfNeeded() {
+    if (endReached || rows.size() > 0) return;
+    getNextRows();
+  }
+
+  private void getNextRows() {
+    Inbox inbox = Inbox.create(system);
+    inbox.send(actorRef, new Next());
+    Object receive;
+    try {
+      receive = inbox.receive(Duration.create(actorConfiguration.getResultFetchTimeout(DEFAULT_WAIT_TIMEOUT),
+        TimeUnit.MILLISECONDS));
+    } catch (Throwable ex) {
+      String errorMessage = "Result fetch timed out";
+      LOG.error(errorMessage, ex);
+      throw new ServiceFormattedException(errorMessage, ex);
+    }
+
+    if (receive instanceof Result) {
+      Result result = (Result) receive;
+      if (descriptions.isEmpty()) {
+        descriptions.addAll(result.getColumns());
+      }
+      rows.addAll(result.getRows());
+    }
+
+    if (receive instanceof NoMoreItems) {
+      endReached = true;
+    }
+
+    if (receive instanceof FetchFailed) {
+      FetchFailed error = (FetchFailed) receive;
+      LOG.error("Failed to fetch results ");
+      throw new ServiceFormattedException(error.getMessage(), error.getError());
+    }
+  }
+}
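
A minimal usage sketch for the cursor above, assuming a viewContext, actorSystem and resultSetActor are already wired up elsewhere (none of them are created here):

  NonPersistentCursor cursor = new NonPersistentCursor(viewContext, actorSystem, resultSetActor);
  List<ColumnDescription> columns = cursor.getDescriptions(); // blocks until the first batch arrives
  while (cursor.hasNext()) {
    Row row = cursor.next(); // served from the local buffer; fetches the next batch when it runs dry
    // process row.getRow() ...
  }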

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/PersistentCursor.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/PersistentCursor.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/PersistentCursor.java
new file mode 100644
index 0000000..1fa1ceb
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/PersistentCursor.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.client;
+
+
+import com.google.common.collect.Lists;
+
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Wrapper over pre-fetched rows and columns. Never blocks and can be reset to iterate again from the beginning.
+ */
+public class PersistentCursor<T, R> implements Cursor<T, R>  {
+  private List<T> rows = Lists.newArrayList();
+  private List<R> columns = Lists.newArrayList();
+  private int offset = 0;
+
+  public PersistentCursor(List<T> rows, List<R> columns) {
+    this.rows = rows;
+    this.columns = columns;
+  }
+
+
+  @Override
+  public Iterator<T> iterator() {
+    return this;
+  }
+
+  @Override
+  public boolean hasNext() {
+    return rows.size() > 0 && offset < rows.size();
+  }
+
+  @Override
+  public T next() {
+    T row = rows.get(offset);
+    offset++;
+    return row;
+  }
+
+  @Override
+  public void remove() {
+    throw new RuntimeException("Read only cursor. Method not supported");
+  }
+
+  @Override
+  public boolean isResettable() {
+    return true;
+  }
+
+  @Override
+  public void reset() {
+    this.offset = 0;
+  }
+
+  @Override
+  public int getOffset() {
+    return offset;
+  }
+
+  @Override
+  public List<R> getDescriptions() {
+    return columns;
+  }
+
+  @Override
+  public void keepAlive() {
+    // Do Nothing as we are pre-fetching everything.
+  }
+}
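
A minimal sketch of the pre-fetched counterpart, assuming prefetchedRows and prefetchedColumns were loaded elsewhere:

  PersistentCursor<Row, ColumnDescription> cursor =
      new PersistentCursor<>(prefetchedRows, prefetchedColumns);
  for (Row row : cursor) {
    // first pass over the result set
  }
  cursor.reset(); // resettable, so the same rows can be iterated again
  for (Row row : cursor) {
    // second pass
  }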

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/Row.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/Row.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/Row.java
new file mode 100644
index 0000000..e1139f2
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/client/Row.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.client;
+
+import java.util.Arrays;
+import java.util.HashSet;
+
+public class Row {
+  private Object[] row;
+
+  public Row(Object[] row) {
+    this(row, null);
+  }
+
+  public Row(Object[] row, HashSet<Integer> selectedColumns) {
+    if (selectedColumns == null || selectedColumns.size() == 0)
+      this.row = row.clone();
+    else {
+      this.row = new Object[selectedColumns.size()];
+      int rowIndex = 0;
+      for (Integer selectedIndex : selectedColumns) {
+        this.row[rowIndex] = row[selectedIndex];
+        rowIndex ++;
+      }
+    }
+  }
+
+  public Object[] getRow() {
+    return row;
+  }
+
+  public void setRow(Object[] row) {
+    this.row = row;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+
+    Row row1 = (Row) o;
+
+    boolean retValue = Arrays.equals(row, row1.row);
+    return retValue;
+  }
+
+  @Override
+  public int hashCode() {
+    return Arrays.hashCode(row);
+  }
+
+  @Override
+  public String toString() {
+    return "Row{" +
+            "row=" + Arrays.toString(row) +
+            '}';
+  }
+}
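
A short illustration of the column-projection constructor, with arbitrary sample values:

  Object[] full = new Object[]{"id-1", "alice", 42, "2017-01-03"};
  Row allColumns = new Row(full); // clones the full array
  Row projected = new Row(full, new HashSet<>(Arrays.asList(1, 2)));
  // projected.getRow() keeps only the values at the selected indexes ("alice" and 42 here)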

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/exceptions/NotConnectedException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/exceptions/NotConnectedException.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/exceptions/NotConnectedException.java
new file mode 100644
index 0000000..6096727
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/exceptions/NotConnectedException.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.exceptions;
+
+/**
+ * Exception thrown when a job is executed before a connection has been established
+ */
+public class NotConnectedException extends RuntimeException {
+  public NotConnectedException(String message) {
+    super(message);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/exceptions/ServiceException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/exceptions/ServiceException.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/exceptions/ServiceException.java
new file mode 100644
index 0000000..e260c16
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/exceptions/ServiceException.java
@@ -0,0 +1,40 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.ambari.view.hive20.exceptions;
+
+public class ServiceException extends Exception {
+  public ServiceException() {
+  }
+
+  public ServiceException(String message) {
+    super(message);
+  }
+
+  public ServiceException(String message, Throwable cause) {
+    super(message, cause);
+  }
+
+  public ServiceException(Throwable cause) {
+    super(cause);
+  }
+
+  public ServiceException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
+    super(message, cause, enableSuppression, writableStackTrace);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/AsyncExecutionFailure.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/AsyncExecutionFailure.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/AsyncExecutionFailure.java
new file mode 100644
index 0000000..d8dd3d3
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/AsyncExecutionFailure.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal;
+
+public class AsyncExecutionFailure {
+}
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/AsyncExecutionSuccess.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/AsyncExecutionSuccess.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/AsyncExecutionSuccess.java
new file mode 100644
index 0000000..63e4112
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/AsyncExecutionSuccess.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal;
+
+public class AsyncExecutionSuccess {
+
+
+}
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/Connectable.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/Connectable.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/Connectable.java
new file mode 100644
index 0000000..10c6e7d
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/Connectable.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal;
+
+import com.google.common.base.Optional;
+import org.apache.hive.jdbc.HiveConnection;
+
+/**
+ * Life cycle management for a HiveConnection (a java.sql.Connection to HiveServer2)
+ */
+public interface Connectable  {
+
+    /**
+     * Get the underlying connection
+     * @return an optional wrapping the connection
+     */
+    Optional<HiveConnection> getConnection();
+
+    /**
+     * Check if the connection is open
+     * @return true if the underlying connection is open
+     */
+    boolean isOpen();
+
+    /**
+     * Open a connection
+     * @throws ConnectionException if the connection cannot be established
+     */
+    void connect() throws ConnectionException;
+
+    /**
+     * Reconnect if closed
+     * @throws ConnectionException if the connection cannot be re-established
+     */
+    void reconnect() throws ConnectionException;
+
+    /**
+     * Close the connection
+     * @throws ConnectionException if the connection cannot be closed cleanly
+     */
+    void disconnect() throws ConnectionException;
+
+    /**
+     * Check whether the connection was rejected as unauthorized
+     * @return true if the last connection attempt failed authorization
+     */
+    boolean isUnauthorized();
+
+}
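
A sketch of a caller driving this life cycle, where connectable stands for any implementation of the interface:

  try {
    if (!connectable.isOpen()) {
      connectable.connect();
    }
    Optional<HiveConnection> connection = connectable.getConnection();
    if (connection.isPresent()) {
      // run statements against connection.get() ...
    }
    connectable.disconnect();
  } catch (ConnectionException e) {
    // decide whether to re-prompt for credentials based on connectable.isUnauthorized()
  }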

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/ConnectionException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/ConnectionException.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/ConnectionException.java
new file mode 100644
index 0000000..14a6397
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/ConnectionException.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal;
+
+public class ConnectionException extends Exception {
+    public ConnectionException(Exception e, String message) {
+        super(message,e);
+    }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/ConnectionProperties.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/ConnectionProperties.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/ConnectionProperties.java
new file mode 100644
index 0000000..10cd784
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/ConnectionProperties.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal;
+
+import com.google.common.collect.Maps;
+
+import java.util.Map;
+
+/**
+ * Holds all information needed to connect to HS2
+ */
+public class ConnectionProperties {
+
+    private String host;
+    private int port;
+    private String userName;
+    private String password;
+    private Map<String, String> authParams = Maps.newHashMap();
+
+    public Map<String, String> getAuthParams() {
+        return authParams;
+    }
+
+    public void addAuthParam(String key,String value){
+        authParams.put(key, value);
+    }
+
+    public String getHost() {
+        return host;
+    }
+
+    public void setHost(String host) {
+        this.host = host;
+    }
+
+    public String getPassword() {
+        return password;
+    }
+
+    public void setPassword(String password) {
+        this.password = password;
+    }
+
+    public int getPort() {
+        return port;
+    }
+
+    public void setPort(int port) {
+        this.port = port;
+    }
+
+    public String getUserName() {
+        return userName;
+    }
+
+    public void setUserName(String userName) {
+        this.userName = userName;
+    }
+
+    @Override
+    public String toString() {
+        return "HiveConnectionProps{" +
+                "authParams=" + authParams +
+                ", host='" + host + '\'' +
+                ", port=" + port +
+                ", userName='" + userName + '\'' +
+                ", password='" + password + '\'' +
+                '}';
+    }
+
+    public String asUrl() {
+        return null;
+    }
+
+    public String asUrlWithoutCredentials() {
+        return null;
+    }
+}
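
Populating the holder is straightforward; host, port, user and the auth parameter below are illustrative values only:

  ConnectionProperties props = new ConnectionProperties();
  props.setHost("hiveserver2.example.com");
  props.setPort(10000);
  props.setUserName("hive");
  props.setPassword(password); // e.g. taken from HiveAuthCredentials
  props.addAuthParam("transportMode", "binary"); // any extra connection parameters go into the map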

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/ConnectionSupplier.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/ConnectionSupplier.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/ConnectionSupplier.java
new file mode 100644
index 0000000..b25b4dd
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/ConnectionSupplier.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.ConnectionDelegate;
+import org.apache.ambari.view.hive20.HiveJdbcConnectionDelegate;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class ConnectionSupplier implements ContextSupplier<ConnectionDelegate> {
+
+  protected final Logger LOG =
+    LoggerFactory.getLogger(getClass());
+
+  @Override
+  public ConnectionDelegate get(ViewContext context) {
+    LOG.debug("Creating Connection delegate instance for Viewname: {}, Instance Name: {}", context.getViewName(), context.getInstanceName());
+    return new HiveJdbcConnectionDelegate();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/ContextSupplier.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/ContextSupplier.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/ContextSupplier.java
new file mode 100644
index 0000000..dff5e25
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/ContextSupplier.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal;
+
+import org.apache.ambari.view.ViewContext;
+
+/**
+ * Supplier of objects of a given type, built from a ViewContext.
+ * @param <T> the type of object supplied
+ */
+public interface ContextSupplier<T> {
+  /**
+   * Retrieves an instance of the appropriate type. The returned object may be a new
+   * instance or an existing one; no guarantee is made either way.
+   * @param context View Context to be used to create the instance
+   * @return an instance of the appropriate type
+   */
+  T get(ViewContext context);
+
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/DataStorageSupplier.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/DataStorageSupplier.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/DataStorageSupplier.java
new file mode 100644
index 0000000..93f8fb1
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/DataStorageSupplier.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.persistence.DataStoreStorage;
+import org.apache.ambari.view.hive20.persistence.Storage;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+/**
+ * A supplier for data storage
+ * Duplicated to keep the API uniform
+ */
+public class DataStorageSupplier implements ContextSupplier<Storage> {
+
+  protected final Logger LOG =
+    LoggerFactory.getLogger(getClass());
+
+  @Override
+  public Storage get(ViewContext context) {
+    LOG.debug("Creating storage instance for Viewname: {}, Instance Name: {}", context.getViewName(), context.getInstanceName());
+    return new DataStoreStorage(context);
+  }
+}


[11/20] ambari git commit: AMBARI-19321 : Hive View 2.0 - Minimal view for Hive which includes new UI changes. Also made changes in poms as required (nitirajrathore)

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/savedQueries/SavedQueryResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/savedQueries/SavedQueryResourceManager.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/savedQueries/SavedQueryResourceManager.java
new file mode 100644
index 0000000..ef497ce
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/savedQueries/SavedQueryResourceManager.java
@@ -0,0 +1,162 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.savedQueries;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.persistence.utils.FilteringStrategy;
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive20.resources.PersonalCRUDResourceManager;
+import org.apache.ambari.view.hive20.utils.*;
+import org.apache.ambari.view.utils.hdfs.HdfsApiException;
+import org.apache.ambari.view.utils.hdfs.HdfsUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.List;
+
+/**
+ * Object that provides CRUD operations for query objects
+ */
+public class SavedQueryResourceManager extends PersonalCRUDResourceManager<SavedQuery> {
+  private final static Logger LOG =
+      LoggerFactory.getLogger(SavedQueryResourceManager.class);
+
+  private SharedObjectsFactory sharedObjectsFactory;
+
+  /**
+   * Constructor
+   * @param context View Context instance
+   */
+  public SavedQueryResourceManager(ViewContext context, SharedObjectsFactory sharedObjectsFactory) {
+    super(SavedQuery.class, sharedObjectsFactory, context);
+    this.sharedObjectsFactory = sharedObjectsFactory;
+  }
+
+  @Override
+  public SavedQuery create(SavedQuery object) {
+    object = super.create(object);
+    try {
+
+      if (object.getQueryFile() == null || object.getQueryFile().isEmpty()) {
+        createDefaultQueryFile(object);
+      }
+
+    } catch (ServiceFormattedException e) {
+      cleanupAfterErrorAndThrowAgain(object, e);
+    }
+    return object;
+  }
+
+  private void createDefaultQueryFile(SavedQuery object) {
+    String userScriptsPath = context.getProperties().get("scripts.dir");
+    if (userScriptsPath == null) {
+      String msg = "scripts.dir is not configured!";
+      LOG.error(msg);
+      throw new MisconfigurationFormattedException("scripts.dir");
+    }
+
+    String normalizedName = String.format("hive-query-%s", object.getId());
+    String timestamp = new SimpleDateFormat("yyyy-MM-dd_hh-mm").format(new Date());
+    String baseFileName = String.format(userScriptsPath +
+        "/%s-%s", normalizedName, timestamp);
+
+    String newFilePath = null;
+    try {
+      newFilePath = HdfsUtil.findUnallocatedFileName(sharedObjectsFactory.getHdfsApi(), baseFileName, ".hql");
+      HdfsUtil.putStringToFile(sharedObjectsFactory.getHdfsApi(), newFilePath, "");
+    } catch (HdfsApiException e) {
+      throw new ServiceFormattedException(e);
+    }
+
+    object.setQueryFile(newFilePath);
+    storageFactory.getStorage().store(SavedQuery.class, object);
+  }
+
+  @Override
+  public SavedQuery read(Object id) throws ItemNotFound {
+    SavedQuery savedQuery = super.read(id);
+    fillShortQueryField(savedQuery);
+    return savedQuery;
+  }
+
+  private void fillShortQueryField(SavedQuery savedQuery) {
+    if (savedQuery.getQueryFile() != null) {
+      FilePaginator paginator = new FilePaginator(savedQuery.getQueryFile(), sharedObjectsFactory.getHdfsApi());
+      String query = null;
+      try {
+        query = paginator.readPage(0);
+      } catch (IOException e) {
+        LOG.error("Can't read query file " + savedQuery.getQueryFile());
+        return;
+      } catch (InterruptedException e) {
+        LOG.error("Can't read query file " + savedQuery.getQueryFile());
+        return;
+      }
+      savedQuery.setShortQuery(makeShortQuery(query));
+    }
+    storageFactory.getStorage().store(SavedQuery.class, savedQuery);
+  }
+
+  private void emptyShortQueryField(SavedQuery query) {
+    query.setShortQuery("");
+    storageFactory.getStorage().store(SavedQuery.class, query);
+  }
+
+  /**
+   * Generates a short preview of a query.
+   * Removes SET statements like "set hive.execution.engine=tez;" from the beginning
+   * and trims the result to 42 characters.
+   * @param query full query
+   * @return shortened query
+   */
+  protected static String makeShortQuery(String query) {
+    query = query.replaceAll("(?i)set\\s+[\\w\\-.]+(\\s*)=(\\s*)[\\w\\-.]+(\\s*);", "");
+    query = query.trim();
+    return query.substring(0, (query.length() > 42)?42:query.length());
+  }
+
+  @Override
+  public SavedQuery update(SavedQuery newObject, String id) throws ItemNotFound {
+    SavedQuery savedQuery = super.update(newObject, id);
+    // Empty the short query so that the next read repopulates it with the proper value
+    // from the queryFile
+    emptyShortQueryField(savedQuery);
+    return savedQuery;
+  }
+
+  @Override
+  public List<SavedQuery> readAll(FilteringStrategy filteringStrategy) {
+    List<SavedQuery> queries = super.readAll(filteringStrategy);
+    for(SavedQuery query : queries) {
+      String shortQuery = query.getShortQuery();
+      if(shortQuery == null || shortQuery.isEmpty()) {
+        fillShortQueryField(query);
+      }
+    }
+    return queries;
+  }
+
+  @Override
+  public void delete(Object resourceId) throws ItemNotFound {
+    super.delete(resourceId);
+  }
+}
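
To illustrate makeShortQuery above (callable from the same package, since it is protected static): a leading SET statement is stripped and the remainder is cut at 42 characters.

  String q = "set hive.execution.engine=tez; SELECT col_a, col_b, col_c FROM some_table WHERE col_a > 10";
  String shortQuery = SavedQueryResourceManager.makeShortQuery(q);
  // shortQuery == "SELECT col_a, col_b, col_c FROM some_table" (first 42 characters once the SET is removed)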

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/savedQueries/SavedQueryResourceProvider.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/savedQueries/SavedQueryResourceProvider.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/savedQueries/SavedQueryResourceProvider.java
new file mode 100644
index 0000000..ba28060
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/savedQueries/SavedQueryResourceProvider.java
@@ -0,0 +1,113 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.savedQueries;
+
+import org.apache.ambari.view.*;
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive20.persistence.utils.OnlyOwnersFilteringStrategy;
+import org.apache.ambari.view.hive20.utils.SharedObjectsFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import java.lang.reflect.InvocationTargetException;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Resource provider for SavedQuery
+ */
+public class SavedQueryResourceProvider implements ResourceProvider<SavedQuery> {
+  @Inject
+  ViewContext context;
+
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(SavedQueryResourceProvider.class);
+  private SharedObjectsFactory sharedObjectsFactory;
+
+  public SharedObjectsFactory getSharedObjectsFactory() {
+    if (sharedObjectsFactory == null)
+      sharedObjectsFactory = new SharedObjectsFactory(context);
+    return sharedObjectsFactory;
+  }
+
+  protected synchronized SavedQueryResourceManager getResourceManager() {
+    return getSharedObjectsFactory().getSavedQueryResourceManager();
+  }
+
+  @Override
+  public SavedQuery getResource(String resourceId, Set<String> properties) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
+    try {
+      return getResourceManager().read(resourceId);
+    } catch (ItemNotFound itemNotFound) {
+      throw new NoSuchResourceException(resourceId);
+    }
+  }
+
+  @Override
+  public Set<SavedQuery> getResources(ReadRequest readRequest) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
+    if (context == null) {
+      return new HashSet<SavedQuery>();
+    }
+    return new HashSet<SavedQuery>(getResourceManager().readAll(
+        new OnlyOwnersFilteringStrategy(this.context.getUsername())));
+  }
+
+  @Override
+  public void createResource(String s, Map<String, Object> stringObjectMap) throws SystemException, ResourceAlreadyExistsException, NoSuchResourceException, UnsupportedPropertyException {
+    SavedQuery item = null;
+    try {
+      item = new SavedQuery(stringObjectMap);
+    } catch (InvocationTargetException e) {
+      throw new SystemException("error on creating resource", e);
+    } catch (IllegalAccessException e) {
+      throw new SystemException("error on creating resource", e);
+    }
+    getResourceManager().create(item);
+  }
+
+  @Override
+  public boolean updateResource(String resourceId, Map<String, Object> stringObjectMap) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
+    SavedQuery item = null;
+    try {
+      item = new SavedQuery(stringObjectMap);
+    } catch (InvocationTargetException e) {
+      throw new SystemException("error on updating resource", e);
+    } catch (IllegalAccessException e) {
+      throw new SystemException("error on updating resource", e);
+    }
+    try {
+      getResourceManager().update(item, resourceId);
+    } catch (ItemNotFound itemNotFound) {
+      throw new NoSuchResourceException(resourceId);
+    }
+    return true;
+  }
+
+  @Override
+  public boolean deleteResource(String resourceId) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
+    try {
+      getResourceManager().delete(resourceId);
+    } catch (ItemNotFound itemNotFound) {
+      throw new NoSuchResourceException(resourceId);
+    }
+    return true;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/savedQueries/SavedQueryService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/savedQueries/SavedQueryService.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/savedQueries/SavedQueryService.java
new file mode 100644
index 0000000..7139ce8
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/savedQueries/SavedQueryService.java
@@ -0,0 +1,267 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.savedQueries;
+
+import org.apache.ambari.view.ViewResourceHandler;
+import org.apache.ambari.view.hive20.BaseService;
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive20.persistence.utils.OnlyOwnersFilteringStrategy;
+import org.apache.ambari.view.hive20.utils.NotFoundFormattedException;
+import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.ambari.view.utils.hdfs.HdfsUtil;
+import org.json.simple.JSONObject;
+import org.json.simple.JSONValue;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.*;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.StreamingOutput;
+import javax.ws.rs.core.UriInfo;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.InputStreamReader;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.util.List;
+
+/**
+ * REST resource for saved queries
+ * API:
+ * GET /:id
+ *      read SavedQuery
+ * POST /
+ *      create new SavedQuery
+ *      Required: title, queryFile
+ * GET /
+ *      get all SavedQueries of current user
+ */
+public class SavedQueryService extends BaseService {
+  @Inject
+  ViewResourceHandler handler;
+
+  protected SavedQueryResourceManager resourceManager = null;
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(SavedQueryService.class);
+
+  protected synchronized SavedQueryResourceManager getResourceManager() {
+    return getSharedObjectsFactory().getSavedQueryResourceManager();
+  }
+
+  protected void setResourceManager(SavedQueryResourceManager resourceManager) {
+    this.resourceManager = resourceManager;
+  }
+
+  /**
+   * Get single item
+   */
+  @GET
+  @Path("{queryId}")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getOne(@PathParam("queryId") String queryId,
+		         @QueryParam("op") String operation) {
+    try {
+      final SavedQuery savedQuery = getResourceManager().read(queryId);
+      if ("download".equals(operation)) {
+        StreamingOutput stream = new StreamingOutput() {
+          @Override
+          public void write(OutputStream os) throws IOException, WebApplicationException {
+            Writer writer = new BufferedWriter(new OutputStreamWriter(os));
+            try {
+              BufferedReader br = new BufferedReader(new InputStreamReader(
+                  getSharedObjectsFactory().getHdfsApi().open(savedQuery.getQueryFile())));
+              String line = br.readLine();
+              while (line != null) {
+                writer.write(line + "\n");
+                line = br.readLine();
+              }
+              writer.flush();
+            } catch (InterruptedException e) {
+              LOG.error("Error reading query file " + savedQuery.getQueryFile(), e);
+            } finally {
+              writer.close();
+            }
+          }
+        };
+        return Response.ok(stream).
+            type(MediaType.TEXT_PLAIN).
+            build();
+      }
+      else {
+        JSONObject object = new JSONObject();
+        object.put("savedQuery", savedQuery);
+        return Response.ok(object).build();
+      }
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (ItemNotFound itemNotFound) {
+      throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Delete single item
+   */
+  @DELETE
+  @Path("{queryId}")
+  public Response delete(@PathParam("queryId") String queryId) {
+    try {
+      getResourceManager().delete(queryId);
+      return Response.status(204).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (ItemNotFound itemNotFound) {
+      throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Get all SavedQueries
+   */
+  @GET
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getList() {
+    try {
+      LOG.debug("Getting all SavedQuery");
+      List allSavedQueries = getResourceManager().readAll(
+          new OnlyOwnersFilteringStrategy(this.context.getUsername()));  //TODO: move strategy to PersonalCRUDRM
+
+      JSONObject object = new JSONObject();
+      object.put("savedQueries", allSavedQueries);
+      return Response.ok(object).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Update item
+   */
+  @PUT
+  @Path("{queryId}")
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response update(SavedQueryRequest request,
+                         @PathParam("queryId") String queryId) {
+    try {
+      getResourceManager().update(request.savedQuery, queryId);
+      return Response.status(204).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (ItemNotFound itemNotFound) {
+      throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Create savedQuery
+   */
+  @POST
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response create(SavedQueryRequest request, @Context HttpServletResponse response,
+                         @Context UriInfo ui) {
+    try {
+      getResourceManager().create(request.savedQuery);
+
+      SavedQuery item = null;
+
+      item = getResourceManager().read(request.savedQuery.getId());
+
+      response.setHeader("Location",
+          String.format("%s/%s", ui.getAbsolutePath().toString(), request.savedQuery.getId()));
+
+      JSONObject object = new JSONObject();
+      object.put("savedQuery", item);
+      return Response.ok(object).status(201).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (ItemNotFound itemNotFound) {
+      throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Get default settings for query
+   */
+  @GET
+  @Path("defaultSettings")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getDefaultSettings() {
+    try {
+      String defaultsFile = context.getProperties().get("scripts.settings.defaults-file");
+      HdfsApi hdfsApi = getSharedObjectsFactory().getHdfsApi();
+
+      String defaults = "{\"settings\": {}}";
+      if (hdfsApi.exists(defaultsFile)) {
+        defaults = HdfsUtil.readFile(hdfsApi, defaultsFile);
+      }
+      return Response.ok(JSONValue.parse(defaults)).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Set default settings for query (overwrites if present)
+   */
+  @POST
+  @Path("defaultSettings")
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response setDefaultSettings(JSONObject settings) {
+    try {
+      String defaultsFile = context.getProperties().get("scripts.settings.defaults-file");
+      HdfsApi hdfsApi = getSharedObjectsFactory().getHdfsApi();
+
+      HdfsUtil.putStringToFile(hdfsApi, defaultsFile,
+          settings.toString());
+      String defaults = HdfsUtil.readFile(hdfsApi, defaultsFile);
+      return Response.ok(JSONValue.parse(defaults)).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Wrapper object for json mapping
+   */
+  public static class SavedQueryRequest {
+    public SavedQuery savedQuery;
+  }
+}
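
For reference, the defaults file handled by getDefaultSettings() and setDefaultSettings() above is plain JSON stored in HDFS. A minimal sketch of the round trip, assuming hdfsApi and defaultsFile are obtained as in the service and using a hypothetical settings key:

    // sketch only; exceptions are handled as in the service methods above
    String defaults = "{\"settings\": {\"hive.execution.engine\": \"tez\"}}";
    HdfsUtil.putStringToFile(hdfsApi, defaultsFile, defaults);
    Object parsed = JSONValue.parse(HdfsUtil.readFile(hdfsApi, defaultsFile));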

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/settings/Setting.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/settings/Setting.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/settings/Setting.java
new file mode 100644
index 0000000..b9b0c3b
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/settings/Setting.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.settings;
+
+import org.apache.ambari.view.hive20.persistence.utils.PersonalResource;
+
+import java.io.Serializable;
+
+/**
+ * Entity to store the user's settings
+ */
+public class Setting implements Serializable, PersonalResource {
+
+  private String id;
+  private String owner;
+  private String key;
+  private String value;
+
+
+  @Override
+  public String getOwner() {
+    return owner;
+  }
+
+  @Override
+  public void setOwner(String owner) {
+    this.owner = owner;
+  }
+
+  @Override
+  public String getId() {
+    return id;
+  }
+
+  @Override
+  public void setId(String id) {
+    this.id = id;
+  }
+
+  public String getKey() {
+    return key;
+  }
+
+  public void setKey(String key) {
+    this.key = key;
+  }
+
+  public String getValue() {
+    return value;
+  }
+
+  public void setValue(String value) {
+    this.value = value;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/settings/SettingsResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/settings/SettingsResourceManager.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/settings/SettingsResourceManager.java
new file mode 100644
index 0000000..6e902db
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/settings/SettingsResourceManager.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.settings;
+
+import com.google.common.base.Optional;
+import com.google.common.base.Predicate;
+import com.google.common.collect.FluentIterable;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive20.persistence.utils.OnlyOwnersFilteringStrategy;
+import org.apache.ambari.view.hive20.persistence.utils.StorageFactory;
+import org.apache.ambari.view.hive20.resources.PersonalCRUDResourceManager;
+import org.apache.ambari.view.hive20.utils.UniqueConstraintViolationException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.annotation.Nullable;
+import javax.inject.Inject;
+import java.util.List;
+
+/**
+ * Resource manager providing CRUD operations over the user's settings.
+ */
+public class SettingsResourceManager extends PersonalCRUDResourceManager<Setting> {
+
+  protected final Logger LOG = LoggerFactory.getLogger(getClass());
+
+  private final ViewContext context;
+
+  @Inject
+  public SettingsResourceManager(ViewContext context) {
+    super(Setting.class, new StorageFactory(context), context);
+    this.context = context;
+  }
+
+  public List<Setting> getSettings() {
+    String username = context.getUsername();
+    return super.readAll(new OnlyOwnersFilteringStrategy(username));
+  }
+
+  public void removeSetting(String id) throws ItemNotFound {
+    super.delete(id);
+  }
+
+  public Setting create(Setting setting) {
+    List<Setting> settings = getSettings();
+    if (checkUniqueViolation(settings, setting)) {
+      LOG.error("Setting key: {} already exist for the user.", setting.getKey());
+      throw new UniqueConstraintViolationException("Setting key: " + setting.getKey() + " already exists for the user");
+    }
+    return super.create(setting);
+  }
+
+  public Setting update(String id, Setting setting) throws ItemNotFound {
+    Setting current = read(id);
+    if(!current.getKey().equalsIgnoreCase(setting.getKey())) {
+      // If the settings key has changed
+      List<Setting> settings = getSettings();
+      if (checkUniqueViolation(settings, setting)) {
+        LOG.error("Setting key: {} already exist for the user.", setting.getKey());
+        throw new UniqueConstraintViolationException("Setting key: " + setting.getKey() + " already exists for the user");
+      }
+    }
+    return super.update(setting, id);
+  }
+
+  /**
+   * @param settings List of all settings for the user
+   * @param setting  new setting to be created or updated
+   * @return true if the setting's key already exists in the list
+   */
+  private boolean checkUniqueViolation(List<Setting> settings, final Setting setting) {
+    Optional<Setting> settingOptional = FluentIterable.from(settings).filter(new Predicate<Setting>() {
+      @Override
+      public boolean apply(@Nullable Setting input) {
+        return input.getKey().equalsIgnoreCase(setting.getKey());
+      }
+    }).first();
+    return settingOptional.isPresent();
+  }
+}
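
A brief usage sketch of the resource manager above (not part of the patch); viewContext stands for an injected ViewContext and the setting key is a hypothetical example:

    SettingsResourceManager manager = new SettingsResourceManager(viewContext);

    Setting setting = new Setting();
    setting.setKey("hive.execution.engine");    // hypothetical key
    setting.setValue("tez");
    Setting created = manager.create(setting);  // throws UniqueConstraintViolationException on a duplicate key

    created.setValue("mr");
    manager.update(created.getId(), created);   // key unchanged, so the uniqueness check is skipped; may throw ItemNotFound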

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/settings/SettingsService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/settings/SettingsService.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/settings/SettingsService.java
new file mode 100644
index 0000000..240878b
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/settings/SettingsService.java
@@ -0,0 +1,145 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.settings;
+
+import org.apache.ambari.view.hive20.BaseService;
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive20.utils.NotFoundFormattedException;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+import java.util.List;
+
+/**
+ * Service supporting the API calls for basic CRUD operations on user settings
+ */
+public class SettingsService extends BaseService {
+
+  protected final Logger LOG =
+      LoggerFactory.getLogger(getClass());
+
+  private final SettingsResourceManager resourceManager;
+
+  @Inject
+  public SettingsService(SettingsResourceManager resourceManager) {
+    this.resourceManager = resourceManager;
+  }
+
+
+  /**
+   * Gets all the settings for the current user
+   */
+  @GET
+  @Consumes(MediaType.APPLICATION_JSON)
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getAll() {
+    List<Setting> settings = resourceManager.getSettings();
+    JSONObject response = new JSONObject();
+    response.put("settings", settings);
+    return Response.ok(response).build();
+  }
+
+  /**
+   * Adds a setting for the current user
+   */
+  @POST
+  @Consumes(MediaType.APPLICATION_JSON)
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response addSetting(SettingRequest settingRequest, @Context HttpServletResponse response, @Context UriInfo uriInfo) {
+    Setting setting = resourceManager.create(settingRequest.getSetting());
+
+    response.setHeader("Location",
+        String.format("%s/%s", uriInfo.getAbsolutePath().toString(), setting.getId()));
+
+    JSONObject op = new JSONObject();
+    op.put("setting", setting);
+    return Response.ok(op).build();
+  }
+
+  /**
+   * Updates a setting for the current user
+   */
+  @PUT
+  @Path("/{id}")
+  @Consumes(MediaType.APPLICATION_JSON)
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response updateSetting(@PathParam("id") String id, SettingRequest settingRequest, @Context HttpServletResponse response, @Context UriInfo uriInfo) {
+    Setting setting = null;
+    try {
+      setting = resourceManager.update(id, settingRequest.getSetting());
+    } catch (ItemNotFound itemNotFound) {
+      LOG.error("Error occurred while creating settings : ", itemNotFound);
+      throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+    }
+
+    response.setHeader("Location",
+        String.format("%s/%s", uriInfo.getAbsolutePath().toString(), setting.getId()));
+
+    JSONObject op = new JSONObject();
+    op.put("setting", setting);
+    return Response.ok(op).build();
+  }
+
+  /**
+   * Deletes a setting for the current user
+   */
+  @DELETE
+  @Path("/{id}")
+  @Consumes(MediaType.APPLICATION_JSON)
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response delete(@PathParam("id") String id) {
+    try {
+      resourceManager.removeSetting(id);
+    } catch (ItemNotFound itemNotFound) {
+      LOG.error("Error occurred while updating setting : ", itemNotFound);
+      throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+    }
+    return Response.noContent().build();
+  }
+
+  /**
+   * Wrapper class for settings request
+   */
+  public static class SettingRequest {
+    private Setting setting;
+
+    public Setting getSetting() {
+      return setting;
+    }
+
+    public void setSetting(Setting setting) {
+      this.setting = setting;
+    }
+  }
+}
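
The service above expects the Setting wrapped in a "setting" field (see SettingRequest). A hypothetical JAX-RS client call; the resource URL is an assumption about how the view instance is deployed:

    String url = "http://ambari-host:8080/api/v1/views/HIVE/versions/2.0.0/instances/hive20/resources/settings"; // assumed URL
    String payload = "{\"setting\": {\"key\": \"hive.execution.engine\", \"value\": \"tez\"}}";
    javax.ws.rs.core.Response resp = javax.ws.rs.client.ClientBuilder.newClient()
        .target(url)
        .request(javax.ws.rs.core.MediaType.APPLICATION_JSON)
        .post(javax.ws.rs.client.Entity.json(payload));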

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/system/SystemService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/system/SystemService.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/system/SystemService.java
new file mode 100644
index 0000000..0afe43c
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/system/SystemService.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.system;
+
+import akka.actor.ActorRef;
+import org.apache.ambari.view.hive20.BaseService;
+import org.apache.ambari.view.hive20.ConnectionSystem;
+import org.apache.ambari.view.hive20.actor.message.Ping;
+
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.core.Response;
+
+/**
+ * System services required for the application to function
+ */
+public class SystemService extends BaseService {
+
+  /**
+   * Clients should send pings to the server at regular intervals so that the system can keep resources alive,
+   * or perform cleanup work when the pings stop.
+   * @return No content
+   */
+  @POST
+  @Path("ping")
+  public Response ping() {
+    //TODO: Change this to EventBus implementation
+    ActorRef metaDataManager = ConnectionSystem.getInstance().getMetaDataManager(context);
+    metaDataManager.tell(new Ping(context.getUsername(), context.getInstanceName()), ActorRef.noSender());
+    return Response.ok().status(Response.Status.NO_CONTENT).build();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/udfs/UDF.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/udfs/UDF.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/udfs/UDF.java
new file mode 100644
index 0000000..362eb16
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/udfs/UDF.java
@@ -0,0 +1,87 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.udfs;
+
+import org.apache.ambari.view.hive20.persistence.utils.PersonalResource;
+import org.apache.commons.beanutils.BeanUtils;
+
+import java.io.Serializable;
+import java.lang.reflect.InvocationTargetException;
+import java.util.Map;
+
+/**
+ * Bean to represent UDF
+ */
+public class UDF implements Serializable, PersonalResource {
+  private String name;
+  private String classname;
+  private String fileResource;
+
+  private String id;
+  private String owner;
+
+  public UDF() {}
+  public UDF(Map<String, Object> stringObjectMap) throws InvocationTargetException, IllegalAccessException {
+    BeanUtils.populate(this, stringObjectMap);
+  }
+
+  @Override
+  public String getId() {
+    return id;
+  }
+
+  @Override
+  public void setId(String id) {
+    this.id = id;
+  }
+
+  @Override
+  public String getOwner() {
+    return owner;
+  }
+
+  @Override
+  public void setOwner(String owner) {
+    this.owner = owner;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  public String getClassname() {
+    return classname;
+  }
+
+  public void setClassname(String classname) {
+    this.classname = classname;
+  }
+
+  public String getFileResource() {
+    return fileResource;
+  }
+
+  public void setFileResource(String fileResource) {
+    this.fileResource = fileResource;
+  }
+}
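
A small sketch of the map-based constructor above, which relies on BeanUtils.populate to match map keys to the bean's setters; the UDF name, class name and file resource id are hypothetical:

    Map<String, Object> props = new HashMap<String, Object>();
    props.put("name", "my_upper");                      // hypothetical UDF name
    props.put("classname", "org.example.udf.MyUpper");  // hypothetical implementation class
    props.put("fileResource", "1");                     // id of the FileResource holding the jar
    UDF udf = new UDF(props);  // declares InvocationTargetException and IllegalAccessException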

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/udfs/UDFResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/udfs/UDFResourceManager.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/udfs/UDFResourceManager.java
new file mode 100644
index 0000000..34d9c46
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/udfs/UDFResourceManager.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.udfs;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.persistence.IStorageFactory;
+import org.apache.ambari.view.hive20.persistence.utils.FilteringStrategy;
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive20.resources.PersonalCRUDResourceManager;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+
+/**
+ * Object that provides CRUD operations for udf objects
+ */
+public class UDFResourceManager extends PersonalCRUDResourceManager<UDF> {
+  private final static Logger LOG =
+      LoggerFactory.getLogger(UDFResourceManager.class);
+
+  /**
+   * Constructor
+   * @param context View Context instance
+   */
+  public UDFResourceManager(IStorageFactory storageFactory, ViewContext context) {
+    super(UDF.class, storageFactory, context);
+  }
+
+  @Override
+  public UDF read(Object id) throws ItemNotFound {
+    return super.read(id);
+  }
+
+  @Override
+  public List<UDF> readAll(FilteringStrategy filteringStrategy) {
+    return super.readAll(filteringStrategy);
+  }
+
+  @Override
+  public UDF create(UDF object) {
+    return super.create(object);
+  }
+
+  @Override
+  public void delete(Object resourceId) throws ItemNotFound {
+    super.delete(resourceId);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/udfs/UDFResourceProvider.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/udfs/UDFResourceProvider.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/udfs/UDFResourceProvider.java
new file mode 100644
index 0000000..5735ea5
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/udfs/UDFResourceProvider.java
@@ -0,0 +1,111 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.udfs;
+
+import org.apache.ambari.view.*;
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive20.persistence.utils.OnlyOwnersFilteringStrategy;
+import org.apache.ambari.view.hive20.utils.SharedObjectsFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import java.lang.reflect.InvocationTargetException;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Resource provider for udf
+ */
+public class UDFResourceProvider implements ResourceProvider<UDF> {
+  @Inject
+  ViewContext context;
+
+  protected UDFResourceManager resourceManager = null;
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(UDFResourceProvider.class);
+
+
+  protected synchronized UDFResourceManager getResourceManager() {
+    if (resourceManager == null) {
+      resourceManager = new UDFResourceManager(new SharedObjectsFactory(context), context);
+    }
+    return resourceManager;
+  }
+
+  @Override
+  public UDF getResource(String resourceId, Set<String> properties) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
+    try {
+      return getResourceManager().read(resourceId);
+    } catch (ItemNotFound itemNotFound) {
+      throw new NoSuchResourceException(resourceId);
+    }
+  }
+
+  @Override
+  public Set<UDF> getResources(ReadRequest readRequest) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
+    if (context == null) {
+      return new HashSet<UDF>();
+    }
+    return new HashSet<UDF>(getResourceManager().readAll(
+        new OnlyOwnersFilteringStrategy(this.context.getUsername())));
+  }
+
+  @Override
+  public void createResource(String s, Map<String, Object> stringObjectMap) throws SystemException, ResourceAlreadyExistsException, NoSuchResourceException, UnsupportedPropertyException {
+    UDF item = null;
+    try {
+      item = new UDF(stringObjectMap);
+    } catch (InvocationTargetException e) {
+      throw new SystemException("error on creating resource", e);
+    } catch (IllegalAccessException e) {
+      throw new SystemException("error on creating resource", e);
+    }
+    getResourceManager().create(item);
+  }
+
+  @Override
+  public boolean updateResource(String resourceId, Map<String, Object> stringObjectMap) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
+    UDF item = null;
+    try {
+      item = new UDF(stringObjectMap);
+    } catch (InvocationTargetException e) {
+      throw new SystemException("error on updating resource", e);
+    } catch (IllegalAccessException e) {
+      throw new SystemException("error on updating resource", e);
+    }
+    try {
+      getResourceManager().update(item, resourceId);
+    } catch (ItemNotFound itemNotFound) {
+      throw new NoSuchResourceException(resourceId);
+    }
+    return true;
+  }
+
+  @Override
+  public boolean deleteResource(String resourceId) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
+    try {
+      getResourceManager().delete(resourceId);
+    } catch (ItemNotFound itemNotFound) {
+      throw new NoSuchResourceException(resourceId);
+    }
+    return true;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/udfs/UDFService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/udfs/UDFService.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/udfs/UDFService.java
new file mode 100644
index 0000000..5c8d2de
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/udfs/UDFService.java
@@ -0,0 +1,193 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.udfs;
+
+import org.apache.ambari.view.ViewResourceHandler;
+import org.apache.ambari.view.hive20.BaseService;
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive20.persistence.utils.OnlyOwnersFilteringStrategy;
+import org.apache.ambari.view.hive20.resources.resources.FileResourceResourceManager;
+import org.apache.ambari.view.hive20.utils.NotFoundFormattedException;
+import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.*;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+import java.util.List;
+
+/**
+ * Servlet for UDFs
+ * API:
+ * GET /:id
+ *      read udf
+ * POST /
+ *      create new udf
+ * GET /
+ *      get all UDFs of the current user
+ */
+public class UDFService extends BaseService {
+  @Inject
+  ViewResourceHandler handler;
+
+  protected UDFResourceManager resourceManager = null;
+  protected FileResourceResourceManager fileResourceResourceManager = null;
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(UDFService.class);
+
+  protected synchronized UDFResourceManager getResourceManager() {
+    if (resourceManager == null) {
+      resourceManager = new UDFResourceManager(getSharedObjectsFactory(), context);
+    }
+    return resourceManager;
+  }
+
+  protected synchronized FileResourceResourceManager getFileResourceResourceManager() {
+    if (fileResourceResourceManager == null) {
+      fileResourceResourceManager = new FileResourceResourceManager(getSharedObjectsFactory(), context);
+    }
+    return fileResourceResourceManager;
+  }
+
+  /**
+   * Get single item
+   */
+  @GET
+  @Path("{id}")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getOne(@PathParam("id") String id) {
+    try {
+      UDF udf = getResourceManager().read(id);
+      JSONObject object = new JSONObject();
+      object.put("udf", udf);
+      return Response.ok(object).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (ItemNotFound itemNotFound) {
+      throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Delete single item
+   */
+  @DELETE
+  @Path("{id}")
+  public Response delete(@PathParam("id") String id) {
+    try {
+      getResourceManager().delete(id);
+      return Response.status(204).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (ItemNotFound itemNotFound) {
+      throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Get all UDFs
+   */
+  @GET
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getList() {
+    try {
+      LOG.debug("Getting all udf");
+      List items = getResourceManager().readAll(
+          new OnlyOwnersFilteringStrategy(this.context.getUsername()));  //TODO: move strategy to PersonalCRUDRM
+
+      JSONObject object = new JSONObject();
+      object.put("udfs", items);
+      return Response.ok(object).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Update item
+   */
+  @PUT
+  @Path("{id}")
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response update(UDFRequest request,
+                         @PathParam("id") String id) {
+    try {
+      if (request.udf.getFileResource() != null)
+        getFileResourceResourceManager().read(request.udf.getFileResource());
+      getResourceManager().update(request.udf, id);
+      return Response.status(204).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (ItemNotFound itemNotFound) {
+      throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Create udf
+   */
+  @POST
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response create(UDFRequest request, @Context HttpServletResponse response,
+                         @Context UriInfo ui) {
+    try {
+      if (request.udf.getFileResource() != null)
+        getFileResourceResourceManager().read(request.udf.getFileResource());
+      getResourceManager().create(request.udf);
+
+      // read back the newly created item to include it in the response
+      UDF item =
+          getResourceManager().read(request.udf.getId());
+
+      response.setHeader("Location",
+          String.format("%s/%s", ui.getAbsolutePath().toString(), request.udf.getId()));
+
+      JSONObject object = new JSONObject();
+      object.put("udf", item);
+      return Response.ok(object).status(201).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (ItemNotFound itemNotFound) {
+      throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Wrapper object for json mapping
+   */
+  public static class UDFRequest {
+    public UDF udf;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/CSVParams.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/CSVParams.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/CSVParams.java
new file mode 100644
index 0000000..355ed6a
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/CSVParams.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads;
+
+import java.io.Serializable;
+
+public class CSVParams implements Serializable {
+
+  public static final char DEFAULT_DELIMITER_CHAR = ',';
+  public static final char DEFAULT_ESCAPE_CHAR = '\\';
+  public static final char DEFAULT_QUOTE_CHAR = '"';
+
+  private Character csvDelimiter;
+  private Character csvEscape;
+  private Character csvQuote;
+
+  public CSVParams() {
+  }
+
+  public CSVParams(Character csvDelimiter, Character csvQuote, Character csvEscape) {
+    this.csvDelimiter = csvDelimiter;
+    this.csvQuote = csvQuote;
+    this.csvEscape = csvEscape;
+  }
+
+  public Character getCsvDelimiter() {
+    return csvDelimiter;
+  }
+
+  public void setCsvDelimiter(Character csvDelimiter) {
+    this.csvDelimiter = csvDelimiter;
+  }
+
+  public Character getCsvEscape() {
+    return csvEscape;
+  }
+
+  public void setCsvEscape(Character csvEscape) {
+    this.csvEscape = csvEscape;
+  }
+
+  public Character getCsvQuote() {
+    return csvQuote;
+  }
+
+  public void setCsvQuote(Character csvQuote) {
+    this.csvQuote = csvQuote;
+  }
+
+  @Override
+  public String toString() {
+    return "CSVParams{" +
+      "csvDelimiter='" + csvDelimiter + '\'' +
+      ", csvEscape='" + csvEscape + '\'' +
+      ", csvQuote='" + csvQuote + '\'' +
+      '}';
+  }
+}
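
Note the constructor argument order above: delimiter, then quote, then escape. A minimal sketch using the provided defaults:

    CSVParams csvParams = new CSVParams(
        CSVParams.DEFAULT_DELIMITER_CHAR,  // ','
        CSVParams.DEFAULT_QUOTE_CHAR,      // '"'
        CSVParams.DEFAULT_ESCAPE_CHAR);    // '\\'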

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/ColumnDescriptionImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/ColumnDescriptionImpl.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/ColumnDescriptionImpl.java
new file mode 100644
index 0000000..a5c0c3f
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/ColumnDescriptionImpl.java
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads;
+
+import org.apache.ambari.view.hive20.client.ColumnDescription;
+
+import java.io.Serializable;
+
+/**
+ * implementation of ColumnDescription which also includes scale and precision.
+ */
+public class ColumnDescriptionImpl implements ColumnDescription, Serializable {
+  private String name;
+  private String type;
+  private int position;
+  /**
+   * can be null
+   */
+  private Integer precision;
+  /**
+   * can be null
+   */
+  private Integer scale;
+
+  public ColumnDescriptionImpl() {
+  }
+
+  public ColumnDescriptionImpl(String name, String type, int position) {
+    this.name = name;
+    this.type = type;
+    this.position = position;
+  }
+
+  public ColumnDescriptionImpl(String name, String type, int position, int precision) {
+    this.name = name;
+    this.type = type;
+    this.position = position;
+    this.precision = precision;
+  }
+
+  public ColumnDescriptionImpl(String name, String type, int position, int precision, int scale) {
+    this.name = name;
+    this.type = type;
+    this.position = position;
+    this.precision = precision;
+    this.scale = scale;
+  }
+
+  @Override
+  public String getName() {
+    return name;
+  }
+
+  @Override
+  public String getType() {
+    return type;
+  }
+
+  @Override
+  public int getPosition() {
+    return this.position;
+  }
+
+  public Integer getPrecision() {
+    return precision;
+  }
+
+  public Integer getScale() {
+    return scale;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+
+    ColumnDescriptionImpl that = (ColumnDescriptionImpl) o;
+
+    if (position != that.position) return false;
+    if (!name.equals(that.name)) return false;
+    return type.equals(that.type);
+
+  }
+
+  @Override
+  public int hashCode() {
+    int result = name.hashCode();
+    result = 31 * result + type.hashCode();
+    result = 31 * result + position;
+    return result;
+  }
+
+  @Override
+  public String toString() {
+    return new StringBuilder().append("ColumnDescriptionImpl[")
+            .append("name : ").append(name)
+            .append(", type : " + type)
+            .append(", position : " + position)
+            .append(", precision : " + precision)
+            .append(", scale : " + scale)
+            .append("]").toString();
+  }
+}
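
The overloaded constructors above let precision and scale be supplied only where they apply. A short sketch with hypothetical columns (the type names are assumptions):

    ColumnDescriptionImpl name  = new ColumnDescriptionImpl("name", "STRING", 0);
    ColumnDescriptionImpl code  = new ColumnDescriptionImpl("code", "VARCHAR", 1, 64);     // precision only
    ColumnDescriptionImpl price = new ColumnDescriptionImpl("price", "DECIMAL", 2, 10, 2); // precision 10, scale 2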

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/HiveFileType.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/HiveFileType.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/HiveFileType.java
new file mode 100644
index 0000000..872bc9c
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/HiveFileType.java
@@ -0,0 +1,30 @@
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads;
+
+public enum HiveFileType {
+  SEQUENCEFILE,
+  TEXTFILE,
+  RCFILE,
+  ORC,
+  PARQUET,
+  AVRO,
+  INPUTFORMAT;
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/TableDataReader.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/TableDataReader.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/TableDataReader.java
new file mode 100644
index 0000000..ee148b8
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/TableDataReader.java
@@ -0,0 +1,111 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads;
+
+import com.opencsv.CSVWriter;
+import org.apache.ambari.view.hive20.client.ColumnDescription;
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.commons.codec.binary.Hex;
+
+import java.io.IOException;
+import java.io.Reader;
+import java.io.StringReader;
+import java.io.StringWriter;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Takes a row iterator as input, iterates over the rows and creates a CSV formatted stream,
+ * separating rows by the endline "\n".
+ * Note: column values should not contain "\n".
+ */
+public class TableDataReader extends Reader {
+
+  private static final int CAPACITY = 1024;
+  private final List<ColumnDescriptionImpl> header;
+  private StringReader stringReader = new StringReader("");
+
+  private Iterator<Row> iterator;
+  private boolean encode = false;
+  public static final char CSV_DELIMITER = '\001';
+
+  public TableDataReader(Iterator<Row> rowIterator, List<ColumnDescriptionImpl> header, boolean encode) {
+    this.iterator = rowIterator;
+    this.encode = encode;
+    this.header = header;
+  }
+
+  @Override
+  public int read(char[] cbuf, int off, int len) throws IOException {
+
+    int totalLen = len;
+    int count = 0;
+    do {
+      int n = stringReader.read(cbuf, off, len);
+
+      if (n != -1) {
+        // n  were read
+        len = len - n; // len more to be read
+        off = off + n; // off now shifted to n more
+        count += n;
+      }
+
+      if (count == totalLen) return count; // all totalLen characters were read
+
+      if (iterator.hasNext()) { // keep reading as long as we keep getting rows
+        StringWriter stringWriter = new StringWriter(CAPACITY);
+        CSVWriter csvPrinter = new CSVWriter(stringWriter,CSV_DELIMITER);
+        Row row = iterator.next();
+        // encode values so that \n and \r are overridden
+        Object[] columnValues = row.getRow();
+        String[] columns = new String[columnValues.length];
+
+        for(int i = 0; i < columnValues.length; i++){
+          String type = header.get(i).getType();
+          if(this.encode &&
+              (
+                ColumnDescription.DataTypes.STRING.toString().equals(type)
+                || ColumnDescription.DataTypes.VARCHAR.toString().equals(type)
+                || ColumnDescription.DataTypes.CHAR.toString().equals(type)
+              )
+            ){
+            columns[i] = Hex.encodeHexString(((String)columnValues[i]).getBytes()); //default charset
+          }else {
+            columns[i] = (String) columnValues[i];
+          }
+        }
+
+        csvPrinter.writeNext(columns,false);
+        stringReader.close(); // close the old string reader
+        stringReader = new StringReader(stringWriter.getBuffer().toString());
+        csvPrinter.close();
+        stringWriter.close();
+      } else {
+        return count == 0 ? -1 : count;
+      }
+    } while (count < totalLen);
+
+    return count;
+  }
+
+  @Override
+  public void close() throws IOException {
+
+  }
+}
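
A usage sketch for the reader above (not part of the patch). It assumes Row exposes its column values via getRow() and offers a Row(Object[]) constructor; string columns are hex-encoded because encode is true:

    List<ColumnDescriptionImpl> header = Arrays.asList(
        new ColumnDescriptionImpl("id", "STRING", 0),
        new ColumnDescriptionImpl("name", "STRING", 1));
    Iterator<Row> rows = Arrays.asList(
        new Row(new Object[]{"1", "alice"}),   // assumed Row(Object[]) constructor
        new Row(new Object[]{"2", "bob"})).iterator();

    Reader csv = new TableDataReader(rows, header, true);
    StringBuilder out = new StringBuilder();
    char[] buf = new char[1024];
    for (int n; (n = csv.read(buf, 0, buf.length)) != -1; ) {  // read throws IOException
      out.append(buf, 0, n);
    }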

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/TableInput.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/TableInput.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/TableInput.java
new file mode 100644
index 0000000..4f15c57
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/TableInput.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads;
+
+import org.apache.ambari.view.hive20.resources.uploads.query.TableInfo;
+
+/**
+ * Used as input in REST calls.
+ */
+class TableInput extends TableInfo {
+  public Boolean isFirstRowHeader = Boolean.FALSE;
+
+  public TableInput() {
+  }
+
+  public Boolean getIsFirstRowHeader() {
+    return isFirstRowHeader;
+  }
+
+  public void setIsFirstRowHeader(Boolean isFirstRowHeader) {
+    this.isFirstRowHeader = isFirstRowHeader;
+  }
+
+  public void validate(){
+    if( null == this.getHiveFileType()){
+      throw new IllegalArgumentException("fileType parameter cannot be null.");
+    }
+    if( null == this.getTableName()){
+      throw new IllegalArgumentException("tableName parameter cannot be null.");
+    }
+    if( null == this.getDatabaseName()){
+      throw new IllegalArgumentException("databaseName parameter cannot be null.");
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/UploadFromHdfsInput.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/UploadFromHdfsInput.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/UploadFromHdfsInput.java
new file mode 100644
index 0000000..c755c02
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/UploadFromHdfsInput.java
@@ -0,0 +1,130 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads;
+
+import java.io.Serializable;
+import java.util.List;
+
+public class UploadFromHdfsInput implements Serializable{
+  private Boolean isFirstRowHeader = Boolean.FALSE;
+  private String inputFileType;
+  private String hdfsPath;
+  private String tableName;
+  private String databaseName;
+  private List<ColumnDescriptionImpl> header;
+  private boolean containsEndlines;
+
+  private String csvDelimiter;
+  private String csvEscape;
+  private String csvQuote;
+
+  public UploadFromHdfsInput() {
+  }
+
+  public String getCsvDelimiter() {
+    return csvDelimiter;
+  }
+
+  public List<ColumnDescriptionImpl> getHeader() {
+    return header;
+  }
+
+  public void setHeader(List<ColumnDescriptionImpl> header) {
+    this.header = header;
+  }
+
+  public boolean isContainsEndlines() {
+    return containsEndlines;
+  }
+
+  public void setContainsEndlines(boolean containsEndlines) {
+    this.containsEndlines = containsEndlines;
+  }
+
+  public void setCsvDelimiter(String csvDelimiter) {
+    this.csvDelimiter = csvDelimiter;
+  }
+
+  public String getCsvEscape() {
+    return csvEscape;
+  }
+
+  public void setCsvEscape(String csvEscape) {
+    this.csvEscape = csvEscape;
+  }
+
+  public String getCsvQuote() {
+    return csvQuote;
+  }
+
+  public void setCsvQuote(String csvQuote) {
+    this.csvQuote = csvQuote;
+  }
+
+  public Boolean getIsFirstRowHeader() {
+    return isFirstRowHeader;
+  }
+
+  public void setIsFirstRowHeader(Boolean firstRowHeader) {
+    isFirstRowHeader = firstRowHeader;
+  }
+
+  public String getInputFileType() {
+    return inputFileType;
+  }
+
+  public void setInputFileType(String inputFileType) {
+    this.inputFileType = inputFileType;
+  }
+
+  public String getHdfsPath() {
+    return hdfsPath;
+  }
+
+  public void setHdfsPath(String hdfsPath) {
+    this.hdfsPath = hdfsPath;
+  }
+
+  public String getTableName() {
+    return tableName;
+  }
+
+  public void setTableName(String tableName) {
+    this.tableName = tableName;
+  }
+
+  public String getDatabaseName() {
+    return databaseName;
+  }
+
+  public void setDatabaseName(String databaseName) {
+    this.databaseName = databaseName;
+  }
+
+  @Override
+  public String toString() {
+    return new StringBuilder("UploadFromHdfsInput{" )
+            .append("isFirstRowHeader=").append( isFirstRowHeader )
+            .append(", inputFileType='" ).append(inputFileType)
+            .append(", hdfsPath='").append(hdfsPath)
+            .append(", tableName='").append( tableName )
+            .append(", databaseName='").append(databaseName )
+            .append('}').toString();
+  }
+}


[20/20] ambari git commit: AMBARI-19321 : Hive View 2.0 - Minimal view for Hive which includes new UI changes. Also made changes in poms as required (nitirajrathore)

Posted by ni...@apache.org.
AMBARI-19321 : Hive View 2.0 - Minimal view for Hive which includes new UI changes. Also made changes in poms as required (nitirajrathore)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/853a1ce7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/853a1ce7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/853a1ce7

Branch: refs/heads/trunk
Commit: 853a1ce780e2e840903d440b0705b7c56f385db7
Parents: 672fdf0
Author: Nitiraj Rathore <ni...@gmail.com>
Authored: Tue Jan 3 17:12:13 2017 +0530
Committer: Nitiraj Rathore <ni...@gmail.com>
Committed: Tue Jan 3 17:12:13 2017 +0530

----------------------------------------------------------------------
 .../src/main/resources/view.log4j.properties    |   6 +-
 contrib/views/hive20/pom.xml                    | 431 +++++++++++
 .../apache/ambari/view/hive20/AuthParams.java   | 102 +++
 .../apache/ambari/view/hive20/BaseService.java  |  59 ++
 .../ambari/view/hive20/ConnectionDelegate.java  |  37 +
 .../ambari/view/hive20/ConnectionFactory.java   | 167 ++++
 .../ambari/view/hive20/ConnectionSystem.java    | 158 ++++
 .../apache/ambari/view/hive20/DataMigrator.java | 101 +++
 .../apache/ambari/view/hive20/HelpService.java  | 132 ++++
 .../view/hive20/HiveJdbcConnectionDelegate.java | 106 +++
 .../apache/ambari/view/hive20/HiveViewImpl.java |  52 ++
 .../ambari/view/hive20/PropertyValidator.java   | 107 +++
 .../org/apache/ambari/view/hive20/TestBean.java |  36 +
 .../hive20/actor/DatabaseChangeNotifier.java    | 168 ++++
 .../view/hive20/actor/DatabaseManager.java      | 285 +++++++
 .../ambari/view/hive20/actor/DeathWatch.java    |  54 ++
 .../ambari/view/hive20/actor/HiveActor.java     |  50 ++
 .../ambari/view/hive20/actor/JdbcConnector.java | 642 +++++++++++++++
 .../ambari/view/hive20/actor/LogAggregator.java | 119 +++
 .../view/hive20/actor/MetaDataManager.java      | 134 ++++
 .../view/hive20/actor/MetaDataRetriever.java    | 165 ++++
 .../view/hive20/actor/OperationController.java  | 408 ++++++++++
 .../view/hive20/actor/ResultSetIterator.java    | 165 ++++
 .../view/hive20/actor/StatementExecutor.java    | 150 ++++
 .../view/hive20/actor/TableChangeNotifier.java  |  95 +++
 .../view/hive20/actor/YarnAtsGUIDFetcher.java   |  71 ++
 .../view/hive20/actor/message/Connect.java      |  77 ++
 .../view/hive20/actor/message/CursorReset.java  |  22 +
 .../view/hive20/actor/message/ExecuteJob.java   |  38 +
 .../view/hive20/actor/message/ExecuteQuery.java |  23 +
 .../view/hive20/actor/message/FetchError.java   |  42 +
 .../view/hive20/actor/message/FetchResult.java  |  42 +
 .../actor/message/GetColumnMetadataJob.java     |  59 ++
 .../view/hive20/actor/message/GetMoreLogs.java  |  22 +
 .../view/hive20/actor/message/HiveJob.java      |  48 ++
 .../view/hive20/actor/message/HiveMessage.java  |  53 ++
 .../view/hive20/actor/message/JobRejected.java  |  44 ++
 .../view/hive20/actor/message/JobSubmitted.java |  38 +
 .../actor/message/LogAggregationFinished.java   |  21 +
 .../ambari/view/hive20/actor/message/Ping.java  |  40 +
 .../hive20/actor/message/RegisterActor.java     |  34 +
 .../view/hive20/actor/message/ResetCursor.java  |  22 +
 .../hive20/actor/message/ResultInformation.java |  83 ++
 .../hive20/actor/message/ResultNotReady.java    |  40 +
 .../view/hive20/actor/message/ResultReady.java  |  44 ++
 .../view/hive20/actor/message/RunStatement.java |  73 ++
 .../hive20/actor/message/SQLStatementJob.java   |  64 ++
 .../actor/message/StartLogAggregation.java      |  34 +
 .../actor/message/job/AsyncExecutionFailed.java |  46 ++
 .../hive20/actor/message/job/CancelJob.java     |  40 +
 .../actor/message/job/ExecuteNextStatement.java |  22 +
 .../actor/message/job/ExecutionFailed.java      |  31 +
 .../view/hive20/actor/message/job/Failure.java  |  37 +
 .../hive20/actor/message/job/FetchFailed.java   |  31 +
 .../view/hive20/actor/message/job/Next.java     |  22 +
 .../hive20/actor/message/job/NoMoreItems.java   |  21 +
 .../view/hive20/actor/message/job/NoResult.java |  21 +
 .../view/hive20/actor/message/job/Result.java   |  43 ++
 .../actor/message/job/ResultSetHolder.java      |  33 +
 .../actor/message/job/SaveDagInformation.java   |  52 ++
 .../hive20/actor/message/job/SaveGuidToDB.java  |  46 ++
 .../actor/message/job/UpdateYarnAtsGuid.java    |  44 ++
 .../hive20/actor/message/lifecycle/CleanUp.java |  21 +
 .../message/lifecycle/DestroyConnector.java     |  52 ++
 .../actor/message/lifecycle/FreeConnector.java  |  53 ++
 .../message/lifecycle/InactivityCheck.java      |  21 +
 .../actor/message/lifecycle/KeepAlive.java      |  21 +
 .../lifecycle/TerminateInactivityCheck.java     |  21 +
 .../backgroundjobs/BackgroundJobController.java |  84 ++
 .../view/hive20/client/AsyncJobRunner.java      |  38 +
 .../view/hive20/client/AsyncJobRunnerImpl.java  | 143 ++++
 .../view/hive20/client/ColumnDescription.java   |  45 ++
 .../client/ColumnDescriptionExtended.java       |  74 ++
 .../hive20/client/ColumnDescriptionShort.java   |  53 ++
 .../view/hive20/client/ConnectionConfig.java    |  59 ++
 .../ambari/view/hive20/client/Cursor.java       |  30 +
 .../ambari/view/hive20/client/DDLDelegator.java |  40 +
 .../view/hive20/client/DDLDelegatorImpl.java    | 267 +++++++
 .../ambari/view/hive20/client/EmptyCursor.java  | 110 +++
 .../view/hive20/client/HiveAuthCredentials.java |  31 +
 .../client/HiveAuthRequiredException.java       |  27 +
 .../client/HiveClientAuthRequiredException.java |  25 +
 .../view/hive20/client/HiveClientException.java |  25 +
 .../client/HiveClientRuntimeException.java      |  25 +
 .../view/hive20/client/NonPersistentCursor.java | 153 ++++
 .../view/hive20/client/PersistentCursor.java    |  87 +++
 .../apache/ambari/view/hive20/client/Row.java   |  74 ++
 .../exceptions/NotConnectedException.java       |  28 +
 .../hive20/exceptions/ServiceException.java     |  40 +
 .../hive20/internal/AsyncExecutionFailure.java  |  23 +
 .../hive20/internal/AsyncExecutionSuccess.java  |  25 +
 .../view/hive20/internal/Connectable.java       |  65 ++
 .../hive20/internal/ConnectionException.java    |  25 +
 .../hive20/internal/ConnectionProperties.java   |  94 +++
 .../hive20/internal/ConnectionSupplier.java     |  37 +
 .../view/hive20/internal/ContextSupplier.java   |  37 +
 .../hive20/internal/DataStorageSupplier.java    |  42 +
 .../view/hive20/internal/DefaultSupplier.java   |  60 ++
 .../ambari/view/hive20/internal/Either.java     |  79 ++
 .../view/hive20/internal/HdfsApiSupplier.java   |  63 ++
 .../hive20/internal/HiveConnectionWrapper.java  | 152 ++++
 .../ambari/view/hive20/internal/HiveQuery.java  |  71 ++
 .../ambari/view/hive20/internal/HiveResult.java | 160 ++++
 .../ambari/view/hive20/internal/HiveTask.java   |  53 ++
 .../view/hive20/internal/HiveTaskMessage.java   | 118 +++
 .../view/hive20/internal/SafeViewContext.java   | 179 +++++
 .../view/hive20/internal/dto/ColumnInfo.java    | 117 +++
 .../view/hive20/internal/dto/ColumnOrder.java   |  54 ++
 .../view/hive20/internal/dto/DatabaseInfo.java  |  85 ++
 .../hive20/internal/dto/DatabaseResponse.java   |  71 ++
 .../hive20/internal/dto/DetailedTableInfo.java  | 124 +++
 .../ambari/view/hive20/internal/dto/Order.java  |  37 +
 .../view/hive20/internal/dto/PartitionInfo.java |  44 ++
 .../view/hive20/internal/dto/Section.java       |  46 ++
 .../view/hive20/internal/dto/StorageInfo.java   | 124 +++
 .../view/hive20/internal/dto/TableInfo.java     |  79 ++
 .../view/hive20/internal/dto/TableMeta.java     | 125 +++
 .../view/hive20/internal/dto/TableResponse.java |  62 ++
 .../view/hive20/internal/dto/ViewInfo.java      |  52 ++
 .../parsers/AbstractTableMetaParser.java        | 177 +++++
 .../internal/parsers/ColumnInfoParser.java      |  97 +++
 .../parsers/CreateTableStatementParser.java     |  38 +
 .../parsers/DetailedTableInfoParser.java        |  71 ++
 .../hive20/internal/parsers/ParserUtils.java    |  52 ++
 .../internal/parsers/PartitionInfoParser.java   |  76 ++
 .../internal/parsers/StorageInfoParser.java     | 100 +++
 .../internal/parsers/TableMetaParser.java       |  30 +
 .../internal/parsers/TableMetaParserImpl.java   |  79 ++
 .../parsers/TableMetaSectionParser.java         |  30 +
 .../hive20/internal/parsers/ViewInfoParser.java |  47 ++
 .../generators/AlterTableQueryGenerator.java    | 365 +++++++++
 .../generators/CreateTableQueryGenerator.java   | 165 ++++
 .../generators/DeleteTableQueryGenerator.java   |  67 ++
 .../query/generators/QueryGenerationUtils.java  | 151 ++++
 .../query/generators/QueryGenerator.java        |  26 +
 .../hive20/persistence/DataStoreStorage.java    | 140 ++++
 .../hive20/persistence/IStorageFactory.java     |  23 +
 .../persistence/InstanceKeyValueStorage.java    | 132 ++++
 .../hive20/persistence/KeyValueStorage.java     | 163 ++++
 .../persistence/LocalKeyValueStorage.java       |  69 ++
 .../persistence/PersistentConfiguration.java    |  52 ++
 .../ambari/view/hive20/persistence/Storage.java |  77 ++
 .../utils/ContextConfigurationAdapter.java      | 260 +++++++
 .../persistence/utils/FilteringStrategy.java    |  32 +
 .../view/hive20/persistence/utils/Indexed.java  |  36 +
 .../hive20/persistence/utils/ItemNotFound.java  |  43 ++
 .../utils/OnlyOwnersFilteringStrategy.java      |  38 +
 .../view/hive20/persistence/utils/Owned.java    |  36 +
 .../persistence/utils/PersonalResource.java     |  22 +
 .../persistence/utils/StorageFactory.java       |  69 ++
 .../hive20/resources/CRUDResourceManager.java   | 131 ++++
 .../view/hive20/resources/IResourceManager.java |  37 +
 .../resources/PersonalCRUDResourceManager.java  |  99 +++
 .../resources/SharedCRUDResourceManager.java    |  44 ++
 .../resources/browser/ConnectionService.java    | 155 ++++
 .../view/hive20/resources/browser/DDLProxy.java | 302 ++++++++
 .../hive20/resources/browser/DDLService.java    | 222 ++++++
 .../hive20/resources/browser/FileService.java   |  35 +
 .../resources/browser/HiveBrowserService.java   | 259 +++++++
 .../hive20/resources/files/FileResource.java    |  70 ++
 .../hive20/resources/files/FileService.java     | 266 +++++++
 .../view/hive20/resources/jobs/Aggregator.java  | 382 +++++++++
 .../resources/jobs/JobResourceProvider.java     | 117 +++
 .../view/hive20/resources/jobs/JobService.java  | 626 +++++++++++++++
 .../resources/jobs/JobServiceInternal.java      |  35 +
 .../jobs/ModifyNotificationDelegate.java        |  23 +
 .../ModifyNotificationInvocationHandler.java    |  40 +
 .../jobs/NoOperationStatusSetException.java     |  23 +
 .../resources/jobs/ProgressRetriever.java       |  66 ++
 .../jobs/ResultsPaginationController.java       | 286 +++++++
 .../resources/jobs/atsJobs/ATSParser.java       | 248 ++++++
 .../jobs/atsJobs/ATSParserFactory.java          |  39 +
 .../jobs/atsJobs/ATSRequestsDelegate.java       |  47 ++
 .../jobs/atsJobs/ATSRequestsDelegateImpl.java   | 175 +++++
 .../resources/jobs/atsJobs/HiveQueryId.java     |  42 +
 .../resources/jobs/atsJobs/IATSParser.java      |  39 +
 .../hive20/resources/jobs/atsJobs/TezDagId.java |  26 +
 .../resources/jobs/atsJobs/TezVertexId.java     |  24 +
 .../view/hive20/resources/jobs/rm/RMParser.java | 129 ++++
 .../resources/jobs/rm/RMParserFactory.java      |  48 ++
 .../resources/jobs/rm/RMRequestsDelegate.java   |  31 +
 .../jobs/rm/RMRequestsDelegateImpl.java         |  99 +++
 .../jobs/viewJobs/IJobControllerFactory.java    |  23 +
 .../hive20/resources/jobs/viewJobs/Job.java     | 131 ++++
 .../resources/jobs/viewJobs/JobController.java  |  44 ++
 .../jobs/viewJobs/JobControllerFactory.java     |  40 +
 .../jobs/viewJobs/JobControllerImpl.java        | 311 ++++++++
 .../hive20/resources/jobs/viewJobs/JobImpl.java | 335 ++++++++
 .../hive20/resources/jobs/viewJobs/JobInfo.java |  78 ++
 .../jobs/viewJobs/JobResourceManager.java       |  93 +++
 .../resources/resources/FileResourceItem.java   |  78 ++
 .../resources/FileResourceResourceManager.java  |  65 ++
 .../resources/FileResourceResourceProvider.java | 110 +++
 .../resources/FileResourceService.java          | 180 +++++
 .../resources/savedQueries/SavedQuery.java      |  96 +++
 .../savedQueries/SavedQueryResourceManager.java | 162 ++++
 .../SavedQueryResourceProvider.java             | 113 +++
 .../savedQueries/SavedQueryService.java         | 267 +++++++
 .../view/hive20/resources/settings/Setting.java |  71 ++
 .../settings/SettingsResourceManager.java       |  97 +++
 .../resources/settings/SettingsService.java     | 145 ++++
 .../hive20/resources/system/SystemService.java  |  48 ++
 .../ambari/view/hive20/resources/udfs/UDF.java  |  87 +++
 .../resources/udfs/UDFResourceManager.java      |  65 ++
 .../resources/udfs/UDFResourceProvider.java     | 111 +++
 .../view/hive20/resources/udfs/UDFService.java  | 193 +++++
 .../hive20/resources/uploads/CSVParams.java     |  74 ++
 .../uploads/ColumnDescriptionImpl.java          | 119 +++
 .../hive20/resources/uploads/HiveFileType.java  |  30 +
 .../resources/uploads/TableDataReader.java      | 111 +++
 .../hive20/resources/uploads/TableInput.java    |  51 ++
 .../resources/uploads/UploadFromHdfsInput.java  | 130 ++++
 .../hive20/resources/uploads/UploadService.java | 565 ++++++++++++++
 .../resources/uploads/parsers/DataParser.java   |  66 ++
 .../uploads/parsers/EndOfDocumentException.java |  41 +
 .../resources/uploads/parsers/IParser.java      |  32 +
 .../resources/uploads/parsers/ParseOptions.java |  61 ++
 .../resources/uploads/parsers/ParseUtils.java   | 213 +++++
 .../resources/uploads/parsers/Parser.java       | 161 ++++
 .../resources/uploads/parsers/PreviewData.java  |  56 ++
 .../resources/uploads/parsers/RowIterator.java  |  98 +++
 .../uploads/parsers/RowMapIterator.java         |  29 +
 .../parsers/csv/commonscsv/CSVIterator.java     |  57 ++
 .../parsers/csv/commonscsv/CSVParser.java       |  88 +++
 .../parsers/csv/opencsv/OpenCSVIterator.java    |  56 ++
 .../parsers/csv/opencsv/OpenCSVParser.java      |  92 +++
 .../uploads/parsers/json/JSONIterator.java      | 160 ++++
 .../uploads/parsers/json/JSONParser.java        |  85 ++
 .../uploads/parsers/xml/XMLIterator.java        | 195 +++++
 .../uploads/parsers/xml/XMLParser.java          | 100 +++
 .../uploads/query/DeleteQueryInput.java         |  48 ++
 .../uploads/query/InsertFromQueryInput.java     |  92 +++
 .../resources/uploads/query/LoadQueryInput.java |  67 ++
 .../resources/uploads/query/QueryGenerator.java | 143 ++++
 .../resources/uploads/query/RowFormat.java      |  57 ++
 .../resources/uploads/query/TableInfo.java      |  97 +++
 .../utils/BadRequestFormattedException.java     |  27 +
 .../ambari/view/hive20/utils/FilePaginator.java | 127 +++
 .../hive20/utils/HiveActorConfiguration.java    |  71 ++
 .../utils/HiveClientFormattedException.java     |  26 +
 .../view/hive20/utils/LoggingOutputStream.java  |  85 ++
 .../MisconfigurationFormattedException.java     |  47 ++
 .../utils/NotFoundFormattedException.java       |  27 +
 .../utils/ResultFetchFormattedException.java    |  27 +
 .../utils/ResultNotReadyFormattedException.java |  27 +
 .../hive20/utils/ServiceFormattedException.java | 105 +++
 .../view/hive20/utils/SharedObjectsFactory.java | 185 +++++
 .../UniqueConstraintViolationException.java     |  31 +
 .../hive20/src/main/resources/application.conf  |  57 ++
 .../views/hive20/src/main/resources/ui/.bowerrc |   4 +
 .../hive20/src/main/resources/ui/.editorconfig  |  20 +
 .../hive20/src/main/resources/ui/.ember-cli     |  27 +
 .../hive20/src/main/resources/ui/.gitignore     |  18 +
 .../hive20/src/main/resources/ui/.jshintrc      |  32 +
 .../hive20/src/main/resources/ui/.travis.yml    |  40 +
 .../src/main/resources/ui/.watchmanconfig       |  21 +
 .../hive20/src/main/resources/ui/README.md      |  28 +
 .../resources/ui/app/adapters/application.js    |  62 ++
 .../main/resources/ui/app/adapters/database.js  |  22 +
 .../src/main/resources/ui/app/adapters/ddl.js   |  26 +
 .../resources/ui/app/adapters/hdfs-viewer.js    |  26 +
 .../src/main/resources/ui/app/adapters/job.js   |  31 +
 .../src/main/resources/ui/app/adapters/ping.js  |  30 +
 .../src/main/resources/ui/app/adapters/query.js |  44 ++
 .../resources/ui/app/adapters/table-info.js     |  37 +
 .../src/main/resources/ui/app/adapters/table.js |  48 ++
 .../hive20/src/main/resources/ui/app/app.js     |  36 +
 .../src/main/resources/ui/app/breakpoints.js    |  23 +
 .../main/resources/ui/app/components/.gitkeep   |   0
 .../ui/app/components/alert-message-display.js  |  45 ++
 .../ui/app/components/alert-message.js          |  32 +
 .../resources/ui/app/components/column-item.js  |  71 ++
 .../resources/ui/app/components/create-table.js | 160 ++++
 .../ui/app/components/database-drop-confirm.js  |  32 +
 .../ui/app/components/database-not-empty.js     |  29 +
 .../ui/app/components/database-search-bar.js    |  78 ++
 .../ui/app/components/edit-setting-item.js      | 111 +++
 .../ui/app/components/hdfs-viewer-modal.js      |  51 ++
 .../resources/ui/app/components/job-item.js     |  38 +
 .../resources/ui/app/components/jobs-browser.js |  48 ++
 .../resources/ui/app/components/list-filter.js  |  45 ++
 .../resources/ui/app/components/list-group.js   |  23 +
 .../resources/ui/app/components/list-item.js    |  29 +
 .../components/multiple-database-search-bar.js  |  84 ++
 .../ui/app/components/property-item.js          |  43 ++
 .../resources/ui/app/components/query-editor.js | 100 +++
 .../ui/app/components/query-result-table.js     |  84 ++
 .../resources/ui/app/components/setting-item.js |  32 +
 .../resources/ui/app/components/setting-list.js |  28 +
 .../app/components/table-advanced-settings.js   | 168 ++++
 .../ui/app/components/table-columns.js          |  52 ++
 .../ui/app/components/table-properties.js       |  39 +
 .../resources/ui/app/components/tabs-item.js    |  61 ++
 .../resources/ui/app/components/tabs-pane.js    |  27 +
 .../ui/app/components/top-application-bar.js    |  22 +
 .../ui/app/configs/create-table-tabs.js         |  48 ++
 .../main/resources/ui/app/configs/datatypes.js  |  34 +
 .../resources/ui/app/configs/file-format.js     |  26 +
 .../main/resources/ui/app/configs/helpers.js    | 151 ++++
 .../resources/ui/app/configs/hive-parameters.js |  93 +++
 .../app/configs/non-printable-escape-chars.js   |  53 ++
 .../ui/app/configs/table-level-tabs.js          |  66 ++
 .../resources/ui/app/configs/top-level-tabs.js  |  67 ++
 .../main/resources/ui/app/controllers/.gitkeep  |   0
 .../main/resources/ui/app/controllers/jobs.js   |  37 +
 .../src/main/resources/ui/app/helpers/.gitkeep  |   0
 .../main/resources/ui/app/helpers/to-json.js    |  25 +
 .../hive20/src/main/resources/ui/app/index.html |  43 ++
 .../main/resources/ui/app/initializers/.gitkeep |   0
 .../resources/ui/app/initializers/responsive.js |  33 +
 .../resources/ui/app/mixins/table-common.js     |  86 +++
 .../src/main/resources/ui/app/models/.gitkeep   |   0
 .../src/main/resources/ui/app/models/alert.js   |  28 +
 .../src/main/resources/ui/app/models/column.js  | 107 +++
 .../main/resources/ui/app/models/database.js    |  25 +
 .../src/main/resources/ui/app/models/file.js    |  26 +
 .../src/main/resources/ui/app/models/job.js     |  56 ++
 .../src/main/resources/ui/app/models/setting.js |  25 +
 .../main/resources/ui/app/models/table-info.js  |  30 +
 .../resources/ui/app/models/table-property.js   |  54 ++
 .../src/main/resources/ui/app/models/table.js   |  33 +
 .../src/main/resources/ui/app/resolver.js       |  21 +
 .../hive20/src/main/resources/ui/app/router.js  |  59 ++
 .../src/main/resources/ui/app/routes/.gitkeep   |   0
 .../main/resources/ui/app/routes/application.js |  32 +
 .../main/resources/ui/app/routes/databases.js   |  96 +++
 .../ui/app/routes/databases/database.js         |  25 +
 .../ui/app/routes/databases/database/index.js   |  27 +
 .../ui/app/routes/databases/database/tables.js  |  47 ++
 .../routes/databases/database/tables/index.js   |  28 +
 .../app/routes/databases/database/tables/new.js |  71 ++
 .../routes/databases/database/tables/table.js   |  44 ++
 .../databases/database/tables/table/columns.js  |  23 +
 .../databases/database/tables/table/ddl.js      |  22 +
 .../databases/database/tables/table/details.js  |  22 +
 .../databases/database/tables/table/index.js    |  25 +
 .../database/tables/table/partitions.js         |  22 +
 .../databases/database/tables/table/stats.js    |  22 +
 .../databases/database/tables/table/storage.js  |  22 +
 .../database/tables/table/table-meta-router.js  |  27 +
 .../databases/database/tables/table/view.js     |  22 +
 .../resources/ui/app/routes/databases/index.js  |  26 +
 .../src/main/resources/ui/app/routes/jobs.js    |  69 ++
 .../main/resources/ui/app/routes/messages.js    |  32 +
 .../resources/ui/app/routes/messages/message.js |  31 +
 .../src/main/resources/ui/app/routes/query.js   | 280 +++++++
 .../resources/ui/app/routes/savedqueries.js     |  22 +
 .../main/resources/ui/app/routes/settings.js    |  93 +++
 .../src/main/resources/ui/app/routes/udfs.js    |  22 +
 .../main/resources/ui/app/serializers/file.js   |  23 +
 .../resources/ui/app/services/alert-messages.js | 144 ++++
 .../resources/ui/app/services/create-table.js   | 182 +++++
 .../src/main/resources/ui/app/services/jobs.js  |  28 +
 .../resources/ui/app/services/keep-alive.js     |  31 +
 .../src/main/resources/ui/app/services/query.js |  45 ++
 .../src/main/resources/ui/app/styles/app.scss   | 774 +++++++++++++++++++
 .../ui/app/styles/bootstrap-overrides.scss      |  51 ++
 .../src/main/resources/ui/app/styles/fonts.scss |  31 +
 .../ui/app/styles/power-select-overrides.scss   |  24 +
 .../resources/ui/app/templates/application.hbs  |  42 +
 .../ui/app/templates/components/.gitkeep        |   0
 .../components/alert-message-display.hbs        |  34 +
 .../app/templates/components/alert-message.hbs  |  34 +
 .../ui/app/templates/components/column-item.hbs | 124 +++
 .../app/templates/components/create-table.hbs   |  63 ++
 .../components/database-drop-confirm.hbs        |  37 +
 .../templates/components/database-not-empty.hbs |  38 +
 .../components/database-search-bar.hbs          |  57 ++
 .../templates/components/edit-setting-item.hbs  |  42 +
 .../templates/components/hdfs-viewer-modal.hbs  |  50 ++
 .../ui/app/templates/components/job-item.hbs    |  33 +
 .../app/templates/components/jobs-browser.hbs   |  62 ++
 .../ui/app/templates/components/list-filter.hbs |  33 +
 .../ui/app/templates/components/list-group.hbs  |  22 +
 .../ui/app/templates/components/list-item.hbs   |  22 +
 .../components/multiple-database-search-bar.hbs |  45 ++
 .../app/templates/components/property-item.hbs  |  45 ++
 .../app/templates/components/query-editor.hbs   |  21 +
 .../templates/components/query-result-table.hbs |  53 ++
 .../app/templates/components/setting-item.hbs   |  28 +
 .../app/templates/components/setting-list.hbs   |  37 +
 .../components/table-advanced-settings.hbs      | 226 ++++++
 .../app/templates/components/table-columns.hbs  |  42 +
 .../templates/components/table-properties.hbs   |  40 +
 .../ui/app/templates/components/tabs-item.hbs   |  24 +
 .../ui/app/templates/components/tabs-pane.hbs   |  21 +
 .../components/top-application-bar.hbs          |  25 +
 .../resources/ui/app/templates/databases.hbs    |  39 +
 .../ui/app/templates/databases/database.hbs     |  19 +
 .../app/templates/databases/database/tables.hbs |  45 ++
 .../templates/databases/database/tables/new.hbs |  42 +
 .../databases/database/tables/table.hbs         |  42 +
 .../databases/database/tables/table/columns.hbs |  38 +
 .../databases/database/tables/table/ddl.hbs     |  27 +
 .../databases/database/tables/table/details.hbs |  65 ++
 .../database/tables/table/partitions.hbs        |  38 +
 .../databases/database/tables/table/stats.hbs   |  17 +
 .../databases/database/tables/table/storage.hbs |  64 ++
 .../databases/database/tables/table/view.hbs    |  40 +
 .../main/resources/ui/app/templates/jobs.hbs    |  23 +
 .../resources/ui/app/templates/messages.hbs     |  54 ++
 .../ui/app/templates/messages/message.hbs       |  48 ++
 .../main/resources/ui/app/templates/query.hbs   |  88 +++
 .../resources/ui/app/templates/savedqueries.hbs |  19 +
 .../resources/ui/app/templates/settings.hbs     |  32 +
 .../main/resources/ui/app/templates/tables.hbs  |  19 +
 .../main/resources/ui/app/templates/udfs.hbs    |  19 +
 .../ui/app/utils/hdfs-picker-config.js          |  32 +
 .../hive20/src/main/resources/ui/bower.json     |  11 +
 .../src/main/resources/ui/config/environment.js |  64 ++
 .../src/main/resources/ui/ember-cli-build.js    |  68 ++
 .../hive20/src/main/resources/ui/package.json   |  64 ++
 .../fonts/roboto/roboto-v15-latin-300.eot       | Bin 0 -> 16205 bytes
 .../fonts/roboto/roboto-v15-latin-300.svg       | 314 ++++++++
 .../fonts/roboto/roboto-v15-latin-300.ttf       | Bin 0 -> 32664 bytes
 .../fonts/roboto/roboto-v15-latin-300.woff      | Bin 0 -> 18520 bytes
 .../fonts/roboto/roboto-v15-latin-300.woff2     | Bin 0 -> 14524 bytes
 .../fonts/roboto/roboto-v15-latin-regular.eot   | Bin 0 -> 16227 bytes
 .../fonts/roboto/roboto-v15-latin-regular.svg   | 308 ++++++++
 .../fonts/roboto/roboto-v15-latin-regular.ttf   | Bin 0 -> 32652 bytes
 .../fonts/roboto/roboto-v15-latin-regular.woff  | Bin 0 -> 18520 bytes
 .../fonts/roboto/roboto-v15-latin-regular.woff2 | Bin 0 -> 14584 bytes
 .../hive20/src/main/resources/ui/testem.js      |  31 +
 .../src/main/resources/ui/tests/.jshintrc       |  53 ++
 .../resources/ui/tests/helpers/destroy-app.js   |  23 +
 .../resources/ui/tests/helpers/flash-message.js |  24 +
 .../ui/tests/helpers/module-for-acceptance.js   |  41 +
 .../main/resources/ui/tests/helpers/resolver.js |  29 +
 .../resources/ui/tests/helpers/responsive.js    |  76 ++
 .../resources/ui/tests/helpers/start-app.js     |  36 +
 .../src/main/resources/ui/tests/index.html      |  51 ++
 .../resources/ui/tests/integration/.gitkeep     |   0
 .../src/main/resources/ui/tests/test-helper.js  |  28 +
 .../src/main/resources/ui/tests/unit/.gitkeep   |   0
 .../src/main/resources/ui/vendor/.gitkeep       |   0
 .../src/main/resources/view.log4j.properties    |  27 +
 .../views/hive20/src/main/resources/view.xml    | 362 +++++++++
 .../view/hive20/AsyncJobRunnerImplTest.java     | 138 ++++
 .../view/hive20/ConnectionFailuresTest.java     | 157 ++++
 .../hive20/HiveJdbcConnectionDelegateTest.java  | 105 +++
 .../ambari/view/hive20/JobExecutionTest.java    | 116 +++
 .../view/hive20/PropertyValidatorTest.java      | 113 +++
 .../view/hive20/ResultSetIteratorTest.java      | 100 +++
 .../internal/parsers/ParserUtilsTest.java       |  66 ++
 .../AlterTableQueryGenerationSpecTest.groovy    |  59 ++
 .../AlterTableQueryGeneratorTest.java           |  94 +++
 .../CreateTableQueryGeneratorTest.java          | 101 +++
 .../hive20/resources/upload/CSVParserTest.java  | 275 +++++++
 .../resources/upload/DataParserCSVTest.java     | 326 ++++++++
 .../resources/upload/DataParserJSONTest.java    | 263 +++++++
 .../resources/upload/DataParserXMLTest.java     | 295 +++++++
 .../hive20/resources/upload/JsonParserTest.java | 146 ++++
 .../resources/upload/OpenCSVParserTest.java     | 313 ++++++++
 .../hive20/resources/upload/OpenCSVTest.java    | 245 ++++++
 .../hive20/resources/upload/ParseUtilsTest.java |  56 ++
 .../resources/upload/QueryGeneratorTest.java    | 108 +++
 .../resources/upload/TableDataReaderTest.java   | 127 +++
 .../hive20/resources/upload/XMLParserTest.java  | 135 ++++
 .../rest/postman/hive2.postman_collection.json  | 107 +++
 contrib/views/pom.xml                           |   1 +
 pom.xml                                         |   2 +
 461 files changed, 36270 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive-next/src/main/resources/view.log4j.properties
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/view.log4j.properties b/contrib/views/hive-next/src/main/resources/view.log4j.properties
index 2e466cd..3de9afe 100644
--- a/contrib/views/hive-next/src/main/resources/view.log4j.properties
+++ b/contrib/views/hive-next/src/main/resources/view.log4j.properties
@@ -24,8 +24,4 @@ log4j.appender.hiveNextView.layout=org.apache.log4j.PatternLayout
 log4j.appender.hiveNextView.layout.ConversionPattern=%d{DATE} %5p [%t] [%X{viewName} %X{viewVersion} %X{viewInstanceName}] %c{1}:%L - %m%n
 
 log4j.logger.org.apache.ambari.view.hive2=INFO,hiveNextView
-log4j.logger.org.apache.hive.jdbc=INFO,hiveNextView
-log4j.logger.akka.actor=INFO,hiveNextView
-log4j.additivity.org.apache.ambari.view.hive2 = false
-log4j.additivity.org.apache.hive.jdbc = false
-log4j.additivity.akka.actor = false
+log4j.additivity.org.apache.ambari.view.hive2 = false
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/pom.xml b/contrib/views/hive20/pom.xml
new file mode 100644
index 0000000..e9bde1d
--- /dev/null
+++ b/contrib/views/hive20/pom.xml
@@ -0,0 +1,431 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.ambari.contrib.views</groupId>
+  <artifactId>hive20</artifactId>
+  <version>2.0.0.0-SNAPSHOT</version>
+  <name>Hive 2.0</name>
+
+  <parent>
+    <groupId>org.apache.ambari.contrib.views</groupId>
+    <artifactId>ambari-contrib-views</artifactId>
+    <version>2.0.0.0-SNAPSHOT</version>
+  </parent>
+
+  <dependencies>
+    <dependency>
+      <groupId>com.jayway.jsonpath</groupId>
+      <artifactId>json-path</artifactId>
+      <version>2.0.0</version>
+    </dependency>
+    <dependency>
+      <groupId>com.google.inject</groupId>
+      <artifactId>guice</artifactId>
+      <version>4.1.0</version>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey.contribs</groupId>
+      <artifactId>jersey-multipart</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-client</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-core</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-json</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.googlecode.json-simple</groupId>
+      <artifactId>json-simple</artifactId>
+      <version>1.1.1</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-configuration</groupId>
+      <artifactId>commons-configuration</artifactId>
+      <version>1.6</version>
+    </dependency>
+    <dependency>
+      <groupId>com.opencsv</groupId>
+      <artifactId>opencsv</artifactId>
+      <version>3.8</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-collections4</artifactId>
+      <version>4.0</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.ambari</groupId>
+      <artifactId>ambari-views</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.google.code.gson</groupId>
+      <artifactId>gson</artifactId>
+      <version>2.2.2</version>
+    </dependency>
+    <dependency>
+      <groupId>javax.servlet</groupId>
+      <artifactId>javax.servlet-api</artifactId>
+      <version>3.0.1</version>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+      <version>1.7.5</version>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-simple</artifactId>
+      <version>1.7.5</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>tomcat</groupId>
+          <artifactId>jasper-runtime</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>tomcat</groupId>
+          <artifactId>jasper-runtime</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>tomcat</groupId>
+          <artifactId>jasper-compiler</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-common</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-client</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>javax.ws.rs</groupId>
+      <artifactId>javax.ws.rs-api</artifactId>
+      <version>2.0</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-jdbc</artifactId>
+      <version>${hive-version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>tomcat</groupId>
+          <artifactId>jasper-runtime</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>tomcat</groupId>
+          <artifactId>jasper-compiler</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>commons-cli</groupId>
+      <artifactId>commons-cli</artifactId>
+      <version>1.2</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-lang</groupId>
+      <artifactId>commons-lang</artifactId>
+      <version>2.2</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.thrift</groupId>
+      <artifactId>libthrift</artifactId>
+      <version>0.9.0</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.easymock</groupId>
+      <artifactId>easymock</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.typesafe.akka</groupId>
+      <artifactId>akka-testkit_2.11</artifactId>
+      <version>2.3.15</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-minicluster</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.ambari.contrib.views</groupId>
+      <artifactId>ambari-views-utils</artifactId>
+      <version>2.0.0.0-SNAPSHOT</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-validator</groupId>
+      <artifactId>commons-validator</artifactId>
+      <version>1.4.0</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-io</groupId>
+      <artifactId>commons-io</artifactId>
+      <version>2.4</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.httpcomponents</groupId>
+      <artifactId>httpclient</artifactId>
+      <version>4.5.2</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.httpcomponents</groupId>
+      <artifactId>httpcore</artifactId>
+      <version>4.4.3</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-csv</artifactId>
+      <version>1.1</version>
+    </dependency>
+    <dependency>
+      <groupId>com.typesafe.akka</groupId>
+      <artifactId>akka-actor_2.11</artifactId>
+      <version>2.3.15</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.ambari.contrib.views</groupId>
+      <artifactId>ambari-views-commons</artifactId>
+      <version>2.0.0.0-SNAPSHOT</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.codehaus.groovy</groupId>
+      <artifactId>groovy-all</artifactId>
+      <version>2.4.4</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.spockframework</groupId>
+      <artifactId>spock-core</artifactId>
+      <version>1.0-groovy-2.4</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <properties>
+    <ambari.dir>../../..</ambari.dir>
+    <hive-version>2.1.0</hive-version>
+    <ambari.version>1.3.0.0-SNAPSHOT</ambari.version>
+  </properties>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.codehaus.gmavenplus</groupId>
+        <artifactId>gmavenplus-plugin</artifactId>
+        <version>1.4</version>
+        <executions>
+          <execution>
+            <goals>
+              <goal>generateStubs</goal>
+              <goal>compile</goal>
+              <goal>testGenerateStubs</goal>
+              <goal>testCompile</goal>
+            </goals>
+          </execution>
+        </executions>
+        <dependencies>
+          <dependency>
+            <groupId>org.codehaus.groovy</groupId>
+            <artifactId>groovy-all</artifactId>
+            <version>2.4.4</version>
+            <scope>runtime</scope>
+          </dependency>
+        </dependencies>
+      </plugin>
+      <!-- Building frontend -->
+      <plugin>
+        <groupId>com.github.eirslett</groupId>
+        <artifactId>frontend-maven-plugin</artifactId>
+        <version>0.0.16</version>
+        <configuration>
+          <nodeVersion>v4.5.0</nodeVersion>
+          <npmVersion>2.15.0</npmVersion>
+          <workingDirectory>src/main/resources/ui/</workingDirectory>
+        </configuration>
+        <executions>
+          <execution>
+            <id>install node and npm</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>install-node-and-npm</goal>
+            </goals>
+          </execution>
+          <execution>
+            <id>npm install</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>npm</goal>
+            </goals>
+            <configuration>
+              <arguments>install --python="${project.basedir}/../src/main/unix/ambari-python-wrap" --unsafe-perm</arguments>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <artifactId>exec-maven-plugin</artifactId>
+        <groupId>org.codehaus.mojo</groupId>
+        <version>1.3.2</version>
+        <executions>
+          <execution>
+            <id>Hive build</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>exec</goal>
+            </goals>
+            <configuration>
+              <workingDirectory>${basedir}/src/main/resources/ui</workingDirectory>
+              <executable>node/node</executable>
+              <arguments>
+                <argument>node_modules/.bin/ember</argument>
+                <argument>build</argument>
+                <argument>--environment=production</argument>
+              </arguments>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>3.1</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+      <plugin>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <phase>generate-resources</phase>
+            <goals>
+              <goal>copy-dependencies</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>${project.build.directory}/lib</outputDirectory>
+              <includeScope>runtime</includeScope>
+            </configuration>
+          </execution>
+          <execution>
+            <id>copy-artifact</id>
+            <phase>package</phase>
+            <goals>
+              <goal>copy</goal>
+            </goals>
+            <configuration>
+              <artifactItems>
+                <artifactItem>
+                  <groupId>${project.groupId}</groupId>
+                  <artifactId>${project.artifactId}</artifactId>
+                  <version>${project.version}</version>
+                  <type>${project.packaging}</type>
+                </artifactItem>
+              </artifactItems>
+              <outputDirectory>${views.jars.dir.rel}</outputDirectory>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+       <groupId>org.vafer</groupId>
+       <artifactId>jdeb</artifactId>
+       <version>1.0.1</version>
+       <executions>
+           <execution>
+               <phase>none</phase>
+               <goals>
+                   <goal>jdeb</goal>
+               </goals>
+           </execution>
+       </executions>
+       <configuration>
+           <submodules>false</submodules>
+       </configuration>
+     </plugin>
+    </plugins>
+    <resources>
+      <resource>
+        <directory>src/main/resources</directory>
+        <filtering>false</filtering>
+        <includes>
+          <include>META-INF/**/*</include>
+          <include>view.xml</include>
+          <include>view.log4j.properties</include>
+          <include>application.conf</include>
+        </includes>
+      </resource>
+      <resource>
+        <directory>src/main/resources/ui/dist</directory>
+        <filtering>false</filtering>
+      </resource>
+      <resource>
+        <directory>src/main/resources/ui/bower_components/polestar</directory>
+        <filtering>false</filtering>
+        <targetPath>polestar</targetPath>
+      </resource>
+      <resource>
+        <directory>src/main/resources/ui/bower_components/voyager</directory>
+        <filtering>false</filtering>
+        <targetPath>voyager</targetPath>
+      </resource>
+      <resource>
+        <targetPath>WEB-INF/lib</targetPath>
+        <filtering>false</filtering>
+        <directory>target/lib</directory>
+      </resource>
+    </resources>
+  </build>
+</project>
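
For orientation, this is roughly the build sequence the pom above wires together (summarised from the plugin configuration shown, not additional behaviour):

  1. generate-sources: frontend-maven-plugin installs node v4.5.0 and npm 2.15.0 under src/main/resources/ui/, then runs "npm install" with the bundled ambari-python-wrap as the python interpreter.
  2. generate-sources: exec-maven-plugin runs node_modules/.bin/ember build --environment=production inside src/main/resources/ui, producing the ui/dist output.
  3. generate-resources: maven-dependency-plugin copies the runtime dependencies into target/lib.
  4. package: the resources section bundles view.xml, view.log4j.properties, application.conf and META-INF, the ember ui/dist output, the polestar and voyager bower components, and target/lib (as WEB-INF/lib) into the view jar, which the copy-artifact execution then places in ${views.jars.dir.rel}.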

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/AuthParams.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/AuthParams.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/AuthParams.java
new file mode 100644
index 0000000..98e6caf
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/AuthParams.java
@@ -0,0 +1,102 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.security.UserGroupInformation;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Holds session parameters pulled from the
+ * view context
+ */
+public class AuthParams {
+  private static final String HIVE_SESSION_PARAMS = "hive.session.params";
+  private Map<String, String> sessionParams = new HashMap<>();
+  private final ViewContext context;
+
+  public AuthParams(ViewContext context) {
+    sessionParams = parseSessionParams(context.getProperties().get(HIVE_SESSION_PARAMS));
+    this.context = context;
+  }
+
+  /**
+   * Returns a map created by parsing the hive.session.params value from the view context
+   * @param params session parameters as string
+   * @return parsed session parameters
+   */
+  private Map<String, String> parseSessionParams(String params) {
+    Map<String, String> sessions = new HashMap<>();
+    if (StringUtils.isEmpty(params))
+      return sessions;
+    String[] splits = params.split(";");
+    for (String split : splits) {
+      String[] paramSplit = split.trim().split("=");
+      if ("auth".equals(paramSplit[0]) || "proxyuser".equals(paramSplit[0])) {
+        sessions.put(paramSplit[0], paramSplit[1]);
+      }
+    }
+    return Collections.unmodifiableMap(sessions);
+  }
+
+  /**
+   * Gets the proxy user
+   * @return User and group information
+   * @throws IOException if the current user cannot be determined
+   */
+  public UserGroupInformation getProxyUser() throws IOException {
+    UserGroupInformation ugi;
+    String proxyuser = null;
+    if(context.getCluster() != null) {
+      proxyuser = context.getCluster().getConfigurationValue("cluster-env","ambari_principal_name");
+    }
+
+    if(StringUtils.isEmpty(proxyuser)) {
+      if (sessionParams.containsKey("proxyuser")) {
+        ugi = UserGroupInformation.createRemoteUser(sessionParams.get("proxyuser"));
+      } else {
+        ugi = UserGroupInformation.getCurrentUser();
+      }
+    } else {
+      ugi = UserGroupInformation.createRemoteUser(proxyuser);
+    }
+    ugi.setAuthenticationMethod(getAuthenticationMethod());
+    return ugi;
+  }
+
+  /**
+   * Gets the authentication method configured in the session parameters.
+   * @return the configured authentication method, or SIMPLE when none is set
+   */
+  private UserGroupInformation.AuthenticationMethod getAuthenticationMethod() {
+    UserGroupInformation.AuthenticationMethod authMethod;
+    if (sessionParams.containsKey("auth") && !StringUtils.isEmpty(sessionParams.get("auth"))) {
+      String authName = sessionParams.get("auth");
+      authMethod = UserGroupInformation.AuthenticationMethod.valueOf(authName.toUpperCase());
+    } else {
+      authMethod = UserGroupInformation.AuthenticationMethod.SIMPLE;
+    }
+    return authMethod;
+  }
+}
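
A minimal, self-contained sketch of what parseSessionParams above does with a hive.session.params value; the value below is invented for illustration, and a length check is added here that the view code itself does not perform:

    import java.util.HashMap;
    import java.util.Map;

    public class SessionParamsSketch {
      public static void main(String[] args) {
        // Hypothetical value of the hive.session.params view property.
        String params = "auth=KERBEROS;proxyuser=ambari-server";
        Map<String, String> kept = new HashMap<>();
        for (String pair : params.split(";")) {
          String[] kv = pair.trim().split("=");
          // Only the "auth" and "proxyuser" keys are retained, as in AuthParams.
          if (kv.length == 2 && ("auth".equals(kv[0]) || "proxyuser".equals(kv[0]))) {
            kept.put(kv[0], kv[1]);
          }
        }
        // Prints the two retained entries; AuthParams then maps "auth" to
        // UserGroupInformation.AuthenticationMethod and "proxyuser" to
        // UserGroupInformation.createRemoteUser(...).
        System.out.println(kept);
      }
    }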

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/BaseService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/BaseService.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/BaseService.java
new file mode 100644
index 0000000..4d16340
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/BaseService.java
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.client.ConnectionConfig;
+import org.apache.ambari.view.hive20.utils.SharedObjectsFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+
+
+/**
+ * Base class for the view's services: holds the injected ViewContext and lazily builds the SharedObjectsFactory and Hive connection config
+ */
+public class BaseService {
+  @Inject
+  protected ViewContext context;
+
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(BaseService.class);
+
+  private SharedObjectsFactory sharedObjectsFactory;
+  public SharedObjectsFactory getSharedObjectsFactory() {
+    if (sharedObjectsFactory == null) {
+      sharedObjectsFactory = new SharedObjectsFactory(context);
+    }
+    return sharedObjectsFactory;
+  }
+
+  public void setSharedObjectsFactory(SharedObjectsFactory sharedObjectsFactory) {
+    this.sharedObjectsFactory = sharedObjectsFactory;
+  }
+
+  public BaseService() {
+//    Thread.currentThread().setContextClassLoader(null);
+  }
+
+  protected ConnectionConfig getHiveConnectionConfig() {
+    return ConnectionFactory.create(context);
+  }
+}
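
A hedged sketch of how a concrete service could build on BaseService, assuming the view framework has injected the ViewContext; ExampleService is a hypothetical class, not part of this commit:

    import org.apache.ambari.view.hive20.BaseService;
    import org.apache.ambari.view.hive20.client.ConnectionConfig;

    public class ExampleService extends BaseService {
      public void logConnectionTarget() {
        // getHiveConnectionConfig() delegates to ConnectionFactory.create(context),
        // which resolves the JDBC URL from the cluster config or custom view properties.
        ConnectionConfig config = getHiveConnectionConfig();
        LOG.info("Resolved Hive connection config: {}", config);
      }
    }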

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/ConnectionDelegate.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/ConnectionDelegate.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/ConnectionDelegate.java
new file mode 100644
index 0000000..f3c4e91
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/ConnectionDelegate.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20;
+
+import com.google.common.base.Optional;
+import org.apache.ambari.view.hive20.actor.message.GetColumnMetadataJob;
+import org.apache.hive.jdbc.HiveConnection;
+import org.apache.hive.jdbc.HiveStatement;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+public interface ConnectionDelegate {
+  HiveStatement createStatement(HiveConnection connection) throws SQLException;
+  Optional<ResultSet> execute(String statement) throws SQLException;
+  Optional<ResultSet> execute(HiveConnection connection, String statement) throws SQLException;
+  ResultSet getColumnMetadata(HiveConnection connection, GetColumnMetadataJob job) throws SQLException;
+  void cancel() throws SQLException;
+  void closeResultSet();
+  void closeStatement();
+}
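
A rough usage sketch for this interface, assuming a HiveConnection has already been opened elsewhere (HiveJdbcConnectionDelegate, added later in this commit, is the real implementation; the sequence below is only an illustration, not a contract):

    import com.google.common.base.Optional;
    import org.apache.ambari.view.hive20.ConnectionDelegate;
    import org.apache.hive.jdbc.HiveConnection;

    import java.sql.ResultSet;
    import java.sql.SQLException;

    public class DelegateUsageSketch {
      public void runStatement(ConnectionDelegate delegate, HiveConnection connection) throws SQLException {
        try {
          // execute() runs a statement on the given connection and returns a
          // result set only when the statement produces one.
          Optional<ResultSet> result = delegate.execute(connection, "SHOW DATABASES");
          if (result.isPresent()) {
            // consume result.get() here ...
          }
        } finally {
          // The delegate tracks the statement and result set it created.
          delegate.closeResultSet();
          delegate.closeStatement();
        }
      }
    }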

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/ConnectionFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/ConnectionFactory.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/ConnectionFactory.java
new file mode 100644
index 0000000..9e36a31
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/ConnectionFactory.java
@@ -0,0 +1,167 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20;
+
+import com.google.common.base.Function;
+import com.google.common.base.Joiner;
+import com.google.common.base.Optional;
+import com.google.common.base.Strings;
+import com.google.common.collect.FluentIterable;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.client.ConnectionConfig;
+
+import java.util.List;
+
+public class ConnectionFactory {
+
+  private static final String ZK_HIVE_DYN_SERVICE_DISCOVERY_KEY = "hive.server2.support.dynamic.service.discovery";
+  private static final String ZK_HIVE_NAMESPACE_KEY = "hive.server2.zookeeper.namespace";
+  private static final String ZK_HIVE_QUORUM = "hive.zookeeper.quorum";
+
+  private static final String AMBARI_HIVE_SERVICE_NAME = "HIVE";
+  private static final String AMBARI_HIVESERVER_COMPONENT_NAME = "HIVE_SERVER";
+
+  private static final String HIVE_SITE = "hive-site";
+  private static final String HIVE_INTERACTIVE_SITE = "hive-interactive-site";
+
+  private static final String HIVE_JDBC_URL_KEY = "hive.jdbc.url";
+  private static final String HIVE_SESSION_PARAMS = "hive.session.params";
+  private static final String HIVE_LDAP_CONFIG = "hive.ldap.configured";
+
+  private static final String BINARY_PORT_KEY = "hive.server2.thrift.port";
+  private static final String HIVE_AUTH_MODE = "hive.server2.authentication";
+  private static final String HTTP_PORT_KEY = "hive.server2.thrift.http.port";
+  private static final String HIVE_TRANSPORT_MODE_KEY = "hive.server2.transport.mode";
+  private static final String HTTP_PATH_KEY = "hive.server2.thrift.http.path";
+  private static final String HS2_PROXY_USER = "hive.server2.proxy.user";
+  private static final String USE_HIVE_INTERACTIVE_MODE = "use.hive.interactive.mode";
+
+  public static boolean isLdapEnabled(ViewContext context){
+    if (context.getCluster() == null) {
+      return context.getProperties().get(HIVE_LDAP_CONFIG).equalsIgnoreCase("true");
+    }
+    return context.getCluster().getConfigurationValue(HIVE_SITE,HIVE_AUTH_MODE).equalsIgnoreCase("ldap");
+  }
+
+  public static ConnectionConfig create(ViewContext context)  {
+
+    String jdbcUrl;
+    if (context.getCluster() == null) {
+      jdbcUrl = getConnectFromCustom(context);
+    } else {
+      if (zookeeperConfigured(context)) {
+        jdbcUrl = getFromClusterZookeeperConfig(context);
+      } else {
+        jdbcUrl = getFromHiveConfiguration(context);
+      }
+    }
+
+    String userName = context.getUsername();
+    if(isLdapEnabled(context)){
+      Optional<String> opPassword = ConnectionSystem.getInstance().getPassword(context);
+      if(opPassword.isPresent()){
+        return new ConnectionConfig(userName, opPassword.get(), jdbcUrl);
+      }
+    }
+    return new ConnectionConfig(userName, "", jdbcUrl);
+  }
+
+
+  private static String getFromHiveConfiguration(ViewContext context) {
+    boolean useLLAP = Boolean.valueOf(context.getProperties().get(USE_HIVE_INTERACTIVE_MODE));
+    String transportMode = context.getCluster().getConfigurationValue(HIVE_SITE, HIVE_TRANSPORT_MODE_KEY);
+    String binaryPort = context.getCluster().getConfigurationValue(HIVE_SITE, BINARY_PORT_KEY);
+    String httpPort = context.getCluster().getConfigurationValue(HIVE_SITE, HTTP_PORT_KEY);
+    if (useLLAP) {
+      binaryPort = context.getCluster().getConfigurationValue(HIVE_INTERACTIVE_SITE, BINARY_PORT_KEY);
+      httpPort = context.getCluster().getConfigurationValue(HIVE_INTERACTIVE_SITE, HTTP_PORT_KEY);
+    }
+
+
+    String pathKey = context.getCluster().getConfigurationValue(HIVE_SITE, HTTP_PATH_KEY);
+    List<String> hiveHosts = context.getCluster().getHostsForServiceComponent(AMBARI_HIVE_SERVICE_NAME, AMBARI_HIVESERVER_COMPONENT_NAME);
+    String sessionParams = context.getProperties().get(HIVE_SESSION_PARAMS);
+
+    boolean isBinary = transportMode.equalsIgnoreCase("binary");
+    final String port = isBinary ? binaryPort : httpPort;
+
+    List<String> hostPorts = FluentIterable.from(hiveHosts).transform(new Function<String, String>() {
+      @Override
+      public String apply(String input) {
+        return input + ":" + port;
+      }
+    }).toList();
+
+    String concatHostPorts = Joiner.on(",").join(hostPorts);
+
+    StringBuilder builder = new StringBuilder();
+    builder.append("jdbc:hive2://")
+        .append(concatHostPorts);
+    if(!Strings.isNullOrEmpty(sessionParams)) {
+      builder.append(";").append(sessionParams);
+    }
+
+    if (!isBinary) {
+      builder.append(";").append("transportMode=http;httpPath=").append(pathKey);
+    }
+
+    return builder.toString();
+  }
+
+  private static String getFromClusterZookeeperConfig(ViewContext context) {
+    boolean useLLAP = Boolean.valueOf(context.getProperties().get(USE_HIVE_INTERACTIVE_MODE));
+    String quorum = context.getCluster().getConfigurationValue(HIVE_SITE, ZK_HIVE_QUORUM);
+
+    String namespace = context.getCluster().getConfigurationValue(HIVE_SITE, ZK_HIVE_NAMESPACE_KEY);
+    if (useLLAP) {
+      namespace = context.getCluster().getConfigurationValue(HIVE_INTERACTIVE_SITE, ZK_HIVE_NAMESPACE_KEY);
+    }
+
+    String sessionParams = context.getProperties().get(HIVE_SESSION_PARAMS);
+
+    String formatted = String.format("jdbc:hive2://%s/;serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=%s", quorum, namespace);
+    if (Strings.isNullOrEmpty(sessionParams)) {
+      sessionParams = "";
+    }
+
+    if (!sessionParams.contains(HS2_PROXY_USER)) {
+      if (!sessionParams.isEmpty()) {
+        sessionParams += ";";
+      }
+      sessionParams = sessionParams + HS2_PROXY_USER + "=" + context.getUsername();
+    }
+
+    if (sessionParams.isEmpty()) {
+      return formatted;
+    }
+    return formatted + ";" + sessionParams;
+  }
+
+  private static boolean zookeeperConfigured(ViewContext context) {
+    boolean fromHiveSite = Boolean.valueOf(context.getCluster().getConfigurationValue(HIVE_SITE, ZK_HIVE_DYN_SERVICE_DISCOVERY_KEY));
+    boolean fromHiveInteractiveSite = Boolean.valueOf(context.getCluster().getConfigurationValue(HIVE_INTERACTIVE_SITE, ZK_HIVE_DYN_SERVICE_DISCOVERY_KEY));
+    return fromHiveInteractiveSite || fromHiveSite;
+  }
+
+  private static String getConnectFromCustom(ViewContext context) {
+    String jdbcUrl = context.getProperties().get(HIVE_JDBC_URL_KEY);
+    String hiveSessionParams = context.getProperties().get(HIVE_SESSION_PARAMS);
+    return jdbcUrl + ";" + hiveSessionParams;
+  }
+}
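
For readability, these are the JDBC URL shapes the code paths above produce; hosts, ports, namespace and HTTP path below are placeholder values, not defaults taken from this commit:

  Custom (no attached cluster):
    <hive.jdbc.url>;<hive.session.params>
  Cluster config, binary transport:
    jdbc:hive2://hive-host-1:10000,hive-host-2:10000;<hive.session.params>
  Cluster config, HTTP transport:
    jdbc:hive2://hive-host-1:10001,hive-host-2:10001;<hive.session.params>;transportMode=http;httpPath=<hive.server2.thrift.http.path>
  ZooKeeper service discovery (when hive.session.params does not already carry hive.server2.proxy.user):
    jdbc:hive2://zk-1:2181,zk-2:2181/;serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=<namespace>;<hive.session.params>;hive.server2.proxy.user=<username>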

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/ConnectionSystem.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/ConnectionSystem.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/ConnectionSystem.java
new file mode 100644
index 0000000..40b4c04
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/ConnectionSystem.java
@@ -0,0 +1,158 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20;
+
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.actor.Inbox;
+import akka.actor.PoisonPill;
+import akka.actor.Props;
+import com.google.common.base.Optional;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.actor.DeathWatch;
+import org.apache.ambari.view.hive20.actor.MetaDataManager;
+import org.apache.ambari.view.hive20.actor.OperationController;
+import org.apache.ambari.view.hive20.internal.ConnectionSupplier;
+import org.apache.ambari.view.hive20.internal.DataStorageSupplier;
+import org.apache.ambari.view.hive20.internal.HdfsApiSupplier;
+import org.apache.ambari.view.hive20.internal.SafeViewContext;
+import org.apache.parquet.Strings;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+public class ConnectionSystem {
+
+  private static final String ACTOR_SYSTEM_NAME = "HiveViewActorSystem";
+  private ActorSystem actorSystem = null;
+  private static volatile ConnectionSystem instance = null;
+  private static final Object lock = new Object();
+  private static Map<String, Map<String, ActorRef>> operationControllerMap = new ConcurrentHashMap<>();
+  private final Map<String, ActorRef> metaDataManagerMap = new ConcurrentHashMap<>();
+
+  // credentials map stores usernames and passwords
+  private static Map<String, String> credentialsMap = new ConcurrentHashMap<>();
+
+  private ConnectionSystem() {
+    ClassLoader classLoader = getClass().getClassLoader();
+    Config config = ConfigFactory.load(classLoader);
+    this.actorSystem = ActorSystem.create(ACTOR_SYSTEM_NAME, config, classLoader);
+  }
+
+  public static ConnectionSystem getInstance() {
+    if (instance == null) {
+      synchronized (lock) {
+        if (instance == null) {
+          instance = new ConnectionSystem();
+        }
+      }
+    }
+    return instance;
+  }
+
+  private ActorRef createOperationController(ViewContext context) {
+    ActorRef deathWatch = actorSystem.actorOf(Props.create(DeathWatch.class));
+    return actorSystem.actorOf(
+        Props.create(OperationController.class, actorSystem, deathWatch, context,
+            new ConnectionSupplier(), new DataStorageSupplier(), new HdfsApiSupplier()));
+  }
+
+  public ActorSystem getActorSystem() {
+    return actorSystem;
+  }
+
+  /**
+   * Returns one OperationController actor per view instance and user.
+   *
+   * @param viewContext the view context identifying the instance and the current user
+   * @return the OperationController actor reference
+   */
+  public synchronized ActorRef getOperationController(ViewContext viewContext) {
+    SafeViewContext context = new SafeViewContext(viewContext);
+    String instanceName = context.getInstanceName();
+    ActorRef ref = null;
+    Map<String, ActorRef> stringActorRefMap = operationControllerMap.get(instanceName);
+    if (stringActorRefMap != null) {
+      ref = stringActorRefMap.get(context.getUsername());
+    }
+    if (ref == null) {
+      ref = createOperationController(context);
+      if (stringActorRefMap == null) {
+        stringActorRefMap = new HashMap<>();
+        stringActorRefMap.put(context.getUsername(), ref);
+        operationControllerMap.put(instanceName, stringActorRefMap);
+      } else {
+        stringActorRefMap.put(context.getUsername(), ref);
+      }
+    }
+    return ref;
+  }
+
+  /**
+   * Returns one MetaDataManager actor per view instance
+   * @param context - View context
+   * @return MetaDataManager actor
+   */
+  public synchronized ActorRef getMetaDataManager(ViewContext context) {
+    SafeViewContext safeViewContext = new SafeViewContext(context);
+    String instanceName = safeViewContext.getInstanceName();
+    ActorRef metaDataManager = metaDataManagerMap.get(instanceName);
+    if(metaDataManager == null) {
+      metaDataManager = createMetaDataManager(safeViewContext);
+      metaDataManagerMap.put(instanceName, metaDataManager);
+    }
+
+    return metaDataManager;
+  }
+
+  private ActorRef createMetaDataManager(SafeViewContext safeViewContext) {
+    return actorSystem.actorOf(MetaDataManager.props(safeViewContext));
+  }
+
+  public synchronized void persistCredentials(String user, String password) {
+    if (!Strings.isNullOrEmpty(password)) {
+      credentialsMap.put(user, password);
+    }
+  }
+
+
+  public synchronized Optional<String> getPassword(ViewContext viewContext) {
+    String pass = credentialsMap.get(viewContext.getUsername());
+    return Optional.fromNullable(pass);
+  }
+
+  public void removeOperationControllerFromCache(String viewInstanceName) {
+    Map<String, ActorRef> refs = operationControllerMap.remove(viewInstanceName);
+    if (refs != null) {
+      for (ActorRef ref : refs.values()) {
+        Inbox inbox = Inbox.create(getActorSystem());
+        inbox.send(ref, PoisonPill.getInstance());
+      }
+    }
+  }
+
+  public void shutdown() {
+    if (!actorSystem.isTerminated()) {
+      actorSystem.shutdown();
+    }
+  }
+}
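A minimal usage sketch of the class above, assuming a caller that already holds a ViewContext (viewContext and the message variable below are placeholders):

    ConnectionSystem system = ConnectionSystem.getInstance();
    // One OperationController per view instance per user, one MetaDataManager per view instance.
    ActorRef controller = system.getOperationController(viewContext);
    ActorRef metaDataManager = system.getMetaDataManager(viewContext);
    // Messages to these actors go through the shared ActorSystem, e.g. via an Inbox:
    Inbox inbox = Inbox.create(system.getActorSystem());
    // inbox.send(controller, message);  // 'message' stands for whatever job/DDL message the actor expects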

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/DataMigrator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/DataMigrator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/DataMigrator.java
new file mode 100644
index 0000000..ae0a958
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/DataMigrator.java
@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.migration.ViewDataMigrationContext;
+import org.apache.ambari.view.migration.ViewDataMigrationException;
+import org.apache.ambari.view.migration.ViewDataMigrator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Data migrator that maps persistence entity names
+ * from the Hive 1 view to the corresponding Hive 2 view entities.
+ */
+public class DataMigrator implements ViewDataMigrator {
+  private final static Logger LOG =
+      LoggerFactory.getLogger(DataMigrator.class);
+
+  /**
+   * The view context of target migration instance.
+   */
+  @Inject
+  private ViewContext viewContext;
+
+  /**
+   * The migration context.
+   */
+  @Inject
+  private ViewDataMigrationContext migrationContext;
+
+  private static final Map<String, Class> hive1EntitiesMapping;
+  static
+  {
+    hive1EntitiesMapping = new HashMap<>();
+    hive1EntitiesMapping.put("org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl",
+        org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobImpl.class);
+    hive1EntitiesMapping.put("org.apache.ambari.view.hive.resources.savedQueries.SavedQuery",
+        org.apache.ambari.view.hive20.resources.savedQueries.SavedQuery.class);
+    hive1EntitiesMapping.put("org.apache.ambari.view.hive.resources.udfs.UDF",
+        org.apache.ambari.view.hive20.resources.udfs.UDF.class);
+    hive1EntitiesMapping.put("org.apache.ambari.view.hive.resources.resources.FileResourceItem",
+        org.apache.ambari.view.hive20.resources.resources.FileResourceItem.class);
+    hive1EntitiesMapping.put("org.apache.ambari.view.hive.TestBean",
+        org.apache.ambari.view.hive20.TestBean.class);
+  }
+
+  @Override
+  public boolean beforeMigration() throws ViewDataMigrationException {
+    return isHive1();
+  }
+
+  @Override
+  public void afterMigration() throws ViewDataMigrationException {
+  }
+
+  @Override
+  public void migrateEntity(Class originEntityClass, Class currentEntityClass) throws ViewDataMigrationException {
+    if (isHive1()) {
+      currentEntityClass = hive1EntitiesMapping.get(originEntityClass.getCanonicalName());
+      if (currentEntityClass == null) {
+        LOG.debug("Mapping was not found for class " + originEntityClass.getCanonicalName());
+        return;
+      }
+
+      migrationContext.copyAllObjects(originEntityClass, currentEntityClass);
+
+    } else {
+      LOG.warn("Unknown migration policy for class " + originEntityClass.getCanonicalName());
+    }
+  }
+
+  @Override
+  public void migrateInstanceData() throws ViewDataMigrationException {
+    migrationContext.copyAllInstanceData();
+  }
+
+  private boolean isHive1() {
+    return migrationContext.getOriginDataVersion() < 1;
+  }
+}
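In effect, migrateEntity() ignores the target class passed in by the framework and re-resolves it from the static hive1EntitiesMapping; the hot path boils down to the following sketch:

    Class target = hive1EntitiesMapping.get(originEntityClass.getCanonicalName());
    if (target != null) {
      // e.g. org.apache.ambari.view.hive.resources.savedQueries.SavedQuery
      //   -> org.apache.ambari.view.hive20.resources.savedQueries.SavedQuery
      migrationContext.copyAllObjects(originEntityClass, target);
    }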

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/HelpService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/HelpService.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/HelpService.java
new file mode 100644
index 0000000..9aaaafe
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/HelpService.java
@@ -0,0 +1,132 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.ViewResourceHandler;
+import org.apache.ambari.view.hive20.resources.files.FileService;
+import org.apache.ambari.view.hive20.resources.jobs.atsJobs.ATSParserFactory;
+import org.apache.ambari.view.hive20.resources.jobs.atsJobs.ATSRequestsDelegateImpl;
+import org.json.simple.JSONObject;
+
+import javax.inject.Inject;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.IOException;
+
+/**
+ * Help service
+ */
+public class HelpService extends BaseService {
+  @Inject
+  ViewContext context;
+
+  @Inject
+  protected ViewResourceHandler handler;
+
+  /**
+   * Constructor
+   */
+  public HelpService() {
+    super();
+  }
+
+  /**
+   * Version
+   * @return version
+   */
+  @GET
+  @Path("/version")
+  @Produces(MediaType.TEXT_PLAIN)
+  public Response version(){
+    return Response.ok("0.0.1-SNAPSHOT").build();
+  }
+
+  // ================================================================================
+  // Smoke tests
+  // ================================================================================
+
+  /**
+   * HDFS Status
+   * @return status
+   */
+  @GET
+  @Path("/hdfsStatus")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response hdfsStatus(){
+    FileService.hdfsSmokeTest(context);
+    return getOKResponse();
+  }
+
+  /**
+   * HomeDirectory Status
+   * @return status
+   */
+  @GET
+  @Path("/userhomeStatus")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response userhomeStatus (){
+    FileService.userhomeSmokeTest(context);
+    return getOKResponse();
+  }
+
+
+  /**
+   * ATS Status
+   * @return status
+   */
+  @GET
+  @Path("/atsStatus")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response atsStatus() {
+    try {
+      ATSRequestsDelegateImpl atsimpl = new ATSRequestsDelegateImpl(context, ATSParserFactory.getATSUrl(context));
+      atsimpl.checkATSStatus();
+      return getOKResponse();
+    }catch (IOException e){
+      throw new WebApplicationException(e);
+    }
+  }
+
+  private Response getOKResponse() {
+    JSONObject response = new JSONObject();
+    response.put("message", "OK");
+    response.put("trace", null);
+    response.put("status", "200");
+    return Response.ok().entity(response).type(MediaType.APPLICATION_JSON).build();
+  }
+
+  /**
+   * Storage smoke test
+   * @return status
+   */
+  @GET
+  @Path("/test")
+  @Produces(MediaType.TEXT_PLAIN)
+  public Response testStorage(){
+    TestBean test = new TestBean();
+    test.someData = "hello world";
+    getSharedObjectsFactory().getStorage().store(TestBean.class, test);
+    return Response.ok("OK").build();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/HiveJdbcConnectionDelegate.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/HiveJdbcConnectionDelegate.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/HiveJdbcConnectionDelegate.java
new file mode 100644
index 0000000..5cc60fb
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/HiveJdbcConnectionDelegate.java
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20;
+
+import com.google.common.base.Optional;
+import org.apache.ambari.view.hive20.actor.message.GetColumnMetadataJob;
+import org.apache.hive.jdbc.HiveConnection;
+import org.apache.hive.jdbc.HiveStatement;
+
+import java.sql.DatabaseMetaData;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+
+public class HiveJdbcConnectionDelegate implements ConnectionDelegate {
+
+  private ResultSet currentResultSet;
+  private HiveStatement currentStatement;
+
+  @Override
+  public HiveStatement createStatement(HiveConnection connection) throws SQLException {
+    Statement statement = connection.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
+    currentStatement = (HiveStatement) statement;
+    return currentStatement;
+  }
+
+  @Override
+  public Optional<ResultSet> execute(String statement) throws SQLException {
+    if (currentStatement == null) {
+      throw new SQLException("Statement not created. Cannot execute Hive queries");
+    }
+
+    boolean hasResultSet = currentStatement.execute(statement);
+
+    if (hasResultSet) {
+      ResultSet resultSet = currentStatement.getResultSet();
+      currentResultSet = resultSet;
+      return Optional.of(resultSet);
+    } else {
+      return Optional.absent();
+    }
+  }
+
+  @Override
+  public Optional<ResultSet> execute(HiveConnection connection, String sqlStatement) throws SQLException {
+    createStatement(connection);
+    return execute(sqlStatement);
+  }
+
+
+  @Override
+  public ResultSet getColumnMetadata(HiveConnection connection, GetColumnMetadataJob job) throws SQLException {
+    DatabaseMetaData metaData = connection.getMetaData();
+    ResultSet resultSet = metaData.getColumns("", job.getSchemaPattern(), job.getTablePattern(), job.getColumnPattern());
+    currentResultSet = resultSet;
+    return resultSet;
+  }
+
+  @Override
+  public void cancel() throws SQLException {
+    if (currentStatement != null) {
+      currentStatement.cancel();
+    }
+  }
+
+  @Override
+  public void closeResultSet() {
+
+    try {
+      if (currentResultSet != null) {
+        currentResultSet.close();
+      }
+    } catch (SQLException e) {
+      // Cannot do anything here
+    }
+  }
+
+  @Override
+  public void closeStatement() {
+    try {
+      if (currentStatement != null) {
+        currentStatement.close();
+      }
+    } catch (SQLException e) {
+      // cannot do anything here
+    }
+  }
+
+
+}
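A minimal usage sketch of the delegate above, assuming an already opened HiveConnection (hiveConnection below is a placeholder):

    ConnectionDelegate delegate = new HiveJdbcConnectionDelegate();
    try {
      Optional<ResultSet> result = delegate.execute(hiveConnection, "SELECT 1");
      if (result.isPresent()) {
        // iterate over result.get() ...
      }
    } finally {
      delegate.closeResultSet();
      delegate.closeStatement();
    }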

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/HiveViewImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/HiveViewImpl.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/HiveViewImpl.java
new file mode 100644
index 0000000..aee7d03
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/HiveViewImpl.java
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20;
+
+import org.apache.ambari.view.View;
+import org.apache.ambari.view.ViewDefinition;
+import org.apache.ambari.view.ViewInstanceDefinition;
+import org.apache.ambari.view.hive20.utils.SharedObjectsFactory;
+import org.apache.ambari.view.utils.UserLocal;
+
+
+public class HiveViewImpl implements View {
+  @Override
+  public void onDeploy(ViewDefinition definition) {
+
+  }
+
+  @Override
+  public void onCreate(ViewInstanceDefinition definition) {
+
+  }
+
+  @Override
+  public void onDestroy(ViewInstanceDefinition definition) {
+    SharedObjectsFactory.dropInstanceCache(definition.getInstanceName());
+    ConnectionSystem.getInstance().removeOperationControllerFromCache(definition.getInstanceName());
+  }
+
+  @Override
+  public void onUpdate(ViewInstanceDefinition definition) {
+    //drop all cached connection for instance
+    UserLocal.dropInstanceCache(definition.getInstanceName());
+    SharedObjectsFactory.dropInstanceCache(definition.getInstanceName());
+    ConnectionSystem.getInstance().removeOperationControllerFromCache(definition.getInstanceName());
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/PropertyValidator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/PropertyValidator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/PropertyValidator.java
new file mode 100644
index 0000000..bca3a20
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/PropertyValidator.java
@@ -0,0 +1,107 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20;
+
+import org.apache.ambari.view.ClusterType;
+import org.apache.ambari.view.ViewInstanceDefinition;
+import org.apache.ambari.view.utils.ambari.ValidatorUtils;
+import org.apache.ambari.view.validation.ValidationResult;
+import org.apache.ambari.view.validation.Validator;
+
+public class PropertyValidator implements Validator {
+
+  public static final String WEBHDFS_URL = "webhdfs.url";
+  public static final String HIVE_PORT = "hive.port";
+  public static final String YARN_ATS_URL = "yarn.ats.url";
+  public static final String HIVE_SESSION_PARAMS = "hive.session.params";
+  public static final String USE_HIVE_INTERACTIVE_MODE = "use.hive.interactive.mode";
+
+  @Override
+  public ValidationResult validateInstance(ViewInstanceDefinition viewInstanceDefinition, ValidationContext validationContext) {
+    return null;
+  }
+
+  @Override
+  public ValidationResult validateProperty(String property, ViewInstanceDefinition viewInstanceDefinition, ValidationContext validationContext) {
+    // Validate non cluster associated properties
+    if (property.equals(HIVE_SESSION_PARAMS)) {
+      String auth = viewInstanceDefinition.getPropertyMap().get(HIVE_SESSION_PARAMS);
+
+      if (auth != null && !auth.isEmpty()) {
+        for(String param : auth.split(";")) {
+          String[] keyvalue = param.split("=");
+          if (keyvalue.length != 2) {
+            return new InvalidPropertyValidationResult(false, "Cannot parse session param " + param + " in " + auth);
+          }
+        }
+      }
+    }
+
+    if (property.equals(USE_HIVE_INTERACTIVE_MODE)) {
+      String value = viewInstanceDefinition.getPropertyMap().get(USE_HIVE_INTERACTIVE_MODE);
+      if (!("true".equalsIgnoreCase(value) || "false".equalsIgnoreCase(value))) {
+        return new InvalidPropertyValidationResult(false, "Must be 'true' or 'false'");
+      }
+    }
+
+    // if associated with cluster(local or remote), no need to validate associated properties
+    ClusterType clusterType = viewInstanceDefinition.getClusterType();
+    if (clusterType == ClusterType.LOCAL_AMBARI || clusterType == ClusterType.REMOTE_AMBARI) {
+      return ValidationResult.SUCCESS;
+    }
+
+    // Cluster associated properties
+    if (property.equals(WEBHDFS_URL)) {
+      String webhdfsUrl = viewInstanceDefinition.getPropertyMap().get(WEBHDFS_URL);
+      if (!ValidatorUtils.validateHdfsURL(webhdfsUrl)) {
+        return new InvalidPropertyValidationResult(false, "Must be valid URL");
+      }
+    }
+
+    if (property.equals(YARN_ATS_URL)) {
+      String atsUrl = viewInstanceDefinition.getPropertyMap().get(YARN_ATS_URL);
+      if (!ValidatorUtils.validateHttpURL(atsUrl)) {
+        return new InvalidPropertyValidationResult(false, "Must be valid URL");
+      }
+    }
+
+    return ValidationResult.SUCCESS;
+  }
+
+  public static class InvalidPropertyValidationResult implements ValidationResult {
+    private boolean valid;
+    private String detail;
+
+    public InvalidPropertyValidationResult(boolean valid, String detail) {
+      this.valid = valid;
+      this.detail = detail;
+    }
+
+    @Override
+    public boolean isValid() {
+      return valid;
+    }
+
+    @Override
+    public String getDetail() {
+      return detail;
+    }
+  }
+
+}
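For the HIVE_SESSION_PARAMS check above, every ';'-separated token of hive.session.params must split into exactly one key and one value on '='. Hypothetical property values for illustration:

    auth=NONE;transportMode=binary    accepted: both tokens are key=value pairs
    auth=NONE;principal               rejected: the token "principal" cannot be split into key=value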

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/TestBean.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/TestBean.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/TestBean.java
new file mode 100644
index 0000000..c51c5c2
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/TestBean.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20;
+
+import org.apache.ambari.view.hive20.persistence.utils.Indexed;
+
+public class TestBean implements Indexed {
+  public String someData;
+  public String id;
+
+  @Override
+  public String getId() {
+    return id;
+  }
+
+  @Override
+  public void setId(String id) {
+    this.id = id;
+  }
+}


[03/20] ambari git commit: AMBARI-19321 : Hive View 2.0 - Minimal view for Hive which includes new UI changes. Also made changes in poms as required (nitirajrathore)

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-regular.svg
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-regular.svg b/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-regular.svg
new file mode 100644
index 0000000..ed55c10
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-regular.svg
@@ -0,0 +1,308 @@
+<?xml version="1.0" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<svg xmlns="http://www.w3.org/2000/svg">
+<defs >
+<font id="Roboto" horiz-adv-x="1157" ><font-face
+    font-family="Roboto"
+    units-per-em="2048"
+    panose-1="2 0 0 0 0 0 0 0 0 0"
+    ascent="1900"
+    descent="-500"
+    alphabetic="0" />
+<glyph unicode=" " horiz-adv-x="507" />
+<glyph unicode="!" horiz-adv-x="527" d="M347 411H180L167 1456H361L347 411ZM160 93Q160 138 187 168T269 199T351 169T379 93T351 19T269 -11T188 18T160 93Z" />
+<glyph unicode="&quot;" horiz-adv-x="655" d="M277 1400L247 1042H136L137 1536H277V1400ZM547 1400L517 1042H406L407 1536H547V1400Z" />
+<glyph unicode="#" horiz-adv-x="1261" d="M765 410H501L421 0H278L358 410H119V547H384L453 901H195V1040H480L562 1456H705L623 1040H887L969 1456H1113L1031 1040H1235V901H1004L935 547H1160V410H909L829 0H685L765 410ZM527 547H791L860 901H596L527 547Z" />
+<glyph unicode="$" horiz-adv-x="1150" d="M856 375Q856 467 792 530T574 644Q361 709 264 813T166 1079Q166 1243 261 1348T524 1473V1692H673V1472Q841 1449 934 1331T1028 1008H844Q844 1149 777 1232T596 1315Q477 1315 414 1254T351 1082Q351 980 417 920T636
+810T874 701T1000 562T1041 377Q1041 208 940 105T655 -17V-208H507V-17Q321 0 216 115T110 429H295Q295 290 368 215T575 140Q706 140 781 203T856 375Z" />
+<glyph unicode="%" horiz-adv-x="1500" d="M105 1176Q105 1307 188 1392T403 1477Q536 1477 618 1392T701 1170V1099Q701 967 618 884T405 800Q275 800 190 883T105 1106V1176ZM243 1099Q243 1021 287 971T405 920Q476 920 519 969T563 1103V1176Q563 1254 520
+1305T403 1356T286 1305T243 1172V1099ZM814 357Q814 488 897 572T1112 657T1327 573T1411 350V279Q1411 148 1328 64T1114 -21T899 62T814 285V357ZM952 279Q952 200 996 150T1114 99Q1186 99 1229 148T1272 283V357Q1272 436 1229 486T1112 536Q1041 536 997
+487T952 353V279ZM447 110L342 176L1053 1314L1158 1248L447 110Z" />
+<glyph unicode="&amp;" horiz-adv-x="1273" d="M101 391Q101 496 159 584T383 789Q286 907 253 979T220 1122Q220 1288 318 1382T584 1476Q734 1476 832 1389T930 1168Q930 1080 886 1006T730 849L623 770L947 383Q1015 513 1015 672H1182Q1182 417 1059 249L1267
+0H1045L948 115Q874 49 775 15T572 -20Q359 -20 230 93T101 391ZM572 131Q719 131 841 243L486 668L453 644Q286 521 286 391Q286 273 362 202T572 131ZM405 1128Q405 1032 523 888L641 971Q709 1019 734 1062T759 1168Q759 1235 709 1279T583 1324Q501 1324 453
+1269T405 1128Z" />
+<glyph unicode="&apos;" horiz-adv-x="357" d="M253 1425L232 1057H103L104 1536H253V1425Z" />
+<glyph unicode="(" horiz-adv-x="700" d="M133 591Q133 817 193 1025T374 1403T623 1643L661 1521Q515 1409 422 1179T319 664L318 579Q318 193 459 -91Q544 -261 661 -357L623 -470Q490 -396 369 -222Q133 118 133 591Z" />
+<glyph unicode=")" horiz-adv-x="712" d="M567 581Q567 358 509 154T330 -224T77 -470L38 -357Q192 -239 285 9T381 561V593Q381 803 337 983T215 1307T38 1530L77 1643Q209 1570 328 1399T507 1022T567 581Z" />
+<glyph unicode="*" horiz-adv-x="882" d="M330 983L28 1073L74 1224L376 1112L367 1456H520L510 1107L807 1217L853 1065L546 974L744 703L620 609L434 897L254 616L129 707L330 983Z" />
+<glyph unicode="+" horiz-adv-x="1161" d="M670 781H1076V606H670V146H484V606H78V781H484V1206H670V781Z" />
+<glyph unicode="," horiz-adv-x="402" d="M134 -290L29 -218Q123 -87 127 52V219H308V74Q308 -27 259 -128T134 -290Z" />
+<glyph unicode="-" horiz-adv-x="565" d="M525 543H37V694H525V543Z" />
+<glyph unicode="." horiz-adv-x="539" d="M144 97Q144 145 172 177T258 209T344 177T374 97Q374 51 345 20T258 -11T173 20T144 97Z" />
+<glyph unicode="/" horiz-adv-x="844" d="M177 -125H18L626 1456H784L177 -125Z" />
+<glyph unicode="0" horiz-adv-x="1150" d="M1034 621Q1034 296 923 138T576 -20Q343 -20 231 134T115 596V843Q115 1164 226 1320T574 1476Q809 1476 920 1326T1034 861V621ZM849 874Q849 1109 783 1216T574 1324Q432 1324 367 1217T300 888V592Q300 356 368 244T576
+131Q713 131 779 237T849 571V874Z" />
+<glyph unicode="1" horiz-adv-x="1150" d="M729 0H543V1233L170 1096V1264L700 1463H729V0Z" />
+<glyph unicode="2" horiz-adv-x="1150" d="M1075 0H121V133L625 693Q737 820 779 899T822 1064Q822 1178 753 1251T569 1324Q431 1324 355 1246T278 1027H93Q93 1228 222 1352T569 1476Q772 1476 890 1370T1008 1086Q1008 871 734 574L344 151H1075V0Z" />
+<glyph unicode="3" horiz-adv-x="1150" d="M390 818H529Q660 820 735 887T810 1068Q810 1324 555 1324Q435 1324 364 1256T292 1074H107Q107 1247 233 1361T555 1476Q761 1476 878 1367T995 1064Q995 969 934 880T766 747Q886 709 951 621T1017 406Q1017 210 889
+95T556 -20T223 91T94 384H280Q280 269 355 200T556 131Q690 131 761 201T832 402Q832 529 754 597T529 667H390V818Z" />
+<glyph unicode="4" horiz-adv-x="1150" d="M902 489H1104V338H902V0H716V338H53V447L705 1456H902V489ZM263 489H716V1203L694 1163L263 489Z" />
+<glyph unicode="5" horiz-adv-x="1150" d="M206 730L280 1456H1026V1285H437L393 888Q500 951 636 951Q835 951 952 820T1069 464Q1069 239 948 110T608 -20Q415 -20 293 87T154 383H329Q346 258 418 195T608 131Q737 131 810 219T884 462Q884 608 805 696T593
+785Q472 785 403 732L354 692L206 730Z" />
+<glyph unicode="6" horiz-adv-x="1150" d="M847 1457V1300H813Q597 1296 469 1172T321 823Q436 955 635 955Q825 955 938 821T1052 475Q1052 250 930 115T601 -20Q392 -20 262 140T132 554V625Q132 1027 303 1239T814 1457H847ZM604 801Q509 801 429 744T318 601V533Q318
+353 399 243T601 133Q726 133 797 225T869 466Q869 616 797 708T604 801Z" />
+<glyph unicode="7" horiz-adv-x="1150" d="M1061 1352L458 0H264L865 1304H77V1456H1061V1352Z" />
+<glyph unicode="8" horiz-adv-x="1150" d="M1004 1076Q1004 967 947 882T791 749Q905 700 971 606T1038 393Q1038 204 911 92T575 -20Q365 -20 239 92T112 393Q112 511 176 606T355 750Q258 798 202 883T146 1076Q146 1260 264 1368T575 1476Q767 1476 885 1368T1004
+1076ZM853 397Q853 519 776 596T573 673T373 597T297 397T370 202T575 131Q705 131 779 202T853 397ZM575 1324Q466 1324 399 1257T331 1073Q331 962 397 894T575 825T752 893T819 1073T750 1254T575 1324Z" />
+<glyph unicode="9" horiz-adv-x="1150" d="M830 640Q772 571 692 529T515 487Q389 487 296 549T151 723T100 972Q100 1118 155 1235T313 1414T551 1476Q767 1476 891 1315T1016 874V820Q1016 395 848 200T341 -1H305V155H344Q573 159 696 274T830 640ZM545 640Q638
+640 716 697T831 838V912Q831 1094 752 1208T552 1322Q430 1322 356 1229T282 982Q282 833 353 737T545 640Z" />
+<glyph unicode=":" horiz-adv-x="496" d="M390 97Q390 145 418 177T504 209T590 177T620 97Q620 51 591 20T504 -11T419 20T390 97ZM135 980Q135 1028 163 1060T249 1092T335 1060T365 980Q365 934 336 903T249 872T164 903T135 980Z" />
+<glyph unicode=";" horiz-adv-x="433" d="M111 980Q111 1028 139 1060T225 1092T311 1060T341 980Q341 934 312 903T225 872T140 903T111 980ZM146 -290L41 -218Q135 -87 139 52V219H320V74Q320 -27 271 -128T146 -290Z" />
+<glyph unicode="&lt;" horiz-adv-x="1041" d="M264 644L890 391V195L72 574V720L890 1098V902L264 644Z" />
+<glyph unicode="=" horiz-adv-x="1124" d="M986 814H152V975H986V814ZM986 399H152V559H986V399Z" />
+<glyph unicode="&gt;" horiz-adv-x="1070" d="M795 650L134 909V1099L988 721V575L134 196V388L795 650Z" />
+<glyph unicode="?" horiz-adv-x="967" d="M357 410Q359 529 384 598T486 751L617 886Q701 981 701 1090Q701 1195 646 1254T486 1314Q384 1314 322 1260T260 1115H75Q77 1277 190 1376T486 1476Q675 1476 780 1375T886 1096Q886 921 724 751L615 643Q542 562 542
+410H357ZM349 93Q349 138 376 168T458 199T540 169T568 93T540 19T458 -11T377 18T349 93Z" />
+<glyph unicode="@" horiz-adv-x="1839" d="M1738 502Q1726 260 1618 120T1329 -20Q1142 -20 1089 148Q1035 63 966 22T822 -20Q680 -20 607 96T553 417Q568 582 628 711T784 915T985 989Q1066 989 1130 968T1274 883L1222 329Q1203 98 1350 98Q1463 98 1533 210T1609
+502Q1628 891 1465 1095T967 1299Q766 1299 610 1200T364 912T263 478Q251 230 323 48T542 -231T899 -328Q989 -328 1079 -306T1230 -249L1267 -364Q1205 -403 1103 -428T895 -453Q645 -453 465 -341T196 -17T118 478Q130 753 241 972T542 1311T971 1431Q1220 1431
+1398 1319T1663 996T1738 502ZM712 417Q698 275 738 199T867 123Q927 123 982 174T1074 320L1075 329L1121 832Q1065 861 1001 861Q884 861 808 742T712 417Z" />
+<glyph unicode="A" horiz-adv-x="1336" d="M973 380H363L226 0H28L584 1456H752L1309 0H1112L973 380ZM421 538H916L668 1219L421 538Z" />
+<glyph unicode="B" horiz-adv-x="1275" d="M169 0V1456H645Q882 1456 1001 1358T1121 1068Q1121 966 1063 888T905 766Q1023 733 1091 641T1160 420Q1160 224 1033 112T674 0H169ZM361 681V157H678Q812 157 889 226T967 418Q967 681 681 681H361ZM361 835H651Q777
+835 852 898T928 1069Q928 1189 858 1243T645 1298H361V835Z" />
+<glyph unicode="C" horiz-adv-x="1333" d="M1240 462Q1213 231 1070 106T688 -20Q430 -20 275 165T119 660V800Q119 1003 191 1157T397 1393T705 1476Q937 1476 1077 1347T1240 988H1047Q1022 1162 939 1240T705 1318Q521 1318 417 1182T312 795V654Q312 417 411
+277T688 137Q848 137 933 209T1047 462H1240Z" />
+<glyph unicode="D" horiz-adv-x="1343" d="M169 0V1456H580Q770 1456 916 1372T1141 1133T1222 777V684Q1222 478 1143 323T916 85T572 0H169ZM361 1298V157H563Q785 157 908 295T1032 688V773Q1032 1021 916 1158T585 1298H361Z" />
+<glyph unicode="E" horiz-adv-x="1164" d="M992 673H361V157H1094V0H169V1456H1084V1298H361V830H992V673Z" />
+<glyph unicode="F" horiz-adv-x="1132" d="M972 643H361V0H169V1456H1071V1298H361V800H972V643Z" />
+<glyph unicode="G" horiz-adv-x="1395" d="M1244 191Q1170 85 1038 33T729 -20Q551 -20 413 63T200 301T122 658V785Q122 1114 275 1295T707 1476Q935 1476 1074 1360T1244 1029H1052Q998 1318 708 1318Q515 1318 416 1183T315 790V671Q315 426 427 282T730 137Q838
+137 919 161T1053 242V569H716V725H1244V191Z" />
+<glyph unicode="H" horiz-adv-x="1460" d="M1288 0H1095V673H361V0H169V1456H361V830H1095V1456H1288V0Z" />
+<glyph unicode="I" horiz-adv-x="557" d="M375 0H183V1456H375V0Z" />
+<glyph unicode="J" horiz-adv-x="1130" d="M779 1456H972V425Q972 216 847 98T512 -20Q295 -20 174 91T53 402H245Q245 277 313 207T512 137Q631 137 704 212T779 422V1456Z" />
+<glyph unicode="K" horiz-adv-x="1284" d="M539 677L361 492V0H169V1456H361V736L1008 1456H1240L667 813L1285 0H1055L539 677Z" />
+<glyph unicode="L" horiz-adv-x="1102" d="M362 157H1052V0H169V1456H362V157Z" />
+<glyph unicode="M" horiz-adv-x="1788" d="M417 1456L893 268L1369 1456H1618V0H1426V567L1444 1179L966 0H819L342 1176L361 567V0H169V1456H417Z" />
+<glyph unicode="N" horiz-adv-x="1460" d="M1288 0H1095L362 1122V0H169V1456H362L1097 329V1456H1288V0Z" />
+<glyph unicode="O" horiz-adv-x="1408" d="M1289 681Q1289 467 1217 308T1013 64T705 -20Q533 -20 400 64T194 305T118 668V773Q118 983 191 1144T397 1390T703 1476Q878 1476 1011 1392T1217 1147T1289 773V681ZM1098 775Q1098 1034 994 1172T703 1311Q521 1311
+417 1173T309 788V681Q309 430 414 287T705 143Q891 143 993 278T1098 667V775Z" />
+<glyph unicode="P" horiz-adv-x="1292" d="M361 570V0H169V1456H706Q945 1456 1080 1334T1216 1011Q1216 799 1084 685T704 570H361ZM361 727H706Q860 727 942 799T1024 1009Q1024 1139 942 1217T717 1298H361V727Z" />
+<glyph unicode="Q" horiz-adv-x="1408" d="M1281 681Q1281 470 1214 318T1026 79L1286 -125L1155 -246L848 -2Q776 -20 696 -20Q524 -20 391 64T185 305T109 668V773Q109 983 182 1144T388 1390T694 1476Q870 1476 1003 1391T1209 1147T1281 774V681ZM1089 775Q1089
+1032 987 1171T694 1311Q513 1311 409 1173T301 788V681Q301 431 405 287T696 143T984 278T1089 667V775Z" />
+<glyph unicode="R" horiz-adv-x="1261" d="M703 589H361V0H168V1456H650Q896 1456 1028 1344T1161 1018Q1161 882 1088 781T883 630L1225 12V0H1019L703 589ZM361 746H656Q799 746 883 820T968 1018Q968 1153 888 1225T655 1298H361V746Z" />
+<glyph unicode="S" horiz-adv-x="1215" d="M598 649Q351 720 239 823T126 1079Q126 1251 263 1363T621 1476Q771 1476 888 1418T1070 1258T1135 1035H942Q942 1167 858 1242T621 1318Q479 1318 400 1256T320 1082Q320 993 395 932T652 819T936 707T1088 563T1138
+370Q1138 193 1000 87T631 -20Q481 -20 351 37T151 195T80 422H273Q273 290 370 214T631 137Q783 137 864 199T945 368T870 533T598 649Z" />
+<glyph unicode="T" horiz-adv-x="1222" d="M1175 1298H707V0H516V1298H49V1456H1175V1298Z" />
+<glyph unicode="U" horiz-adv-x="1328" d="M1194 1456V466Q1193 260 1065 129T716 -18L665 -20Q426 -20 284 109T140 464V1456H330V470Q330 312 417 225T665 137Q828 137 914 224T1001 469V1456H1194Z" />
+<glyph unicode="V" horiz-adv-x="1303" d="M651 255L1067 1456H1277L737 0H567L28 1456H237L651 255Z" />
+<glyph unicode="W" horiz-adv-x="1817" d="M483 459L511 267L552 440L840 1456H1002L1283 440L1323 264L1354 460L1580 1456H1773L1420 0H1245L945 1061L922 1172L899 1061L588 0H413L61 1456H253L483 459Z" />
+<glyph unicode="X" horiz-adv-x="1284" d="M644 898L993 1456H1219L759 734L1230 0H1002L644 568L284 0H57L529 734L68 1456H293L644 898Z" />
+<glyph unicode="Y" horiz-adv-x="1230" d="M613 725L993 1456H1211L709 543V0H517V543L15 1456H235L613 725Z" />
+<glyph unicode="Z" horiz-adv-x="1226" d="M313 157H1146V0H86V144L884 1298H99V1456H1114V1315L313 157Z" />
+<glyph unicode="[" horiz-adv-x="543" d="M523 1512H332V-160H523V-312H146V1664H523V1512Z" />
+<glyph unicode="\" horiz-adv-x="840" d="M40 1456H216L824 -125H648L40 1456Z" />
+<glyph unicode="]" horiz-adv-x="543" d="M9 1664H387V-312H9V-160H202V1512H9V1664Z" />
+<glyph unicode="^" horiz-adv-x="856" d="M426 1211L236 729H64L363 1456H490L788 729H617L426 1211Z" />
+<glyph unicode="_" horiz-adv-x="924" d="M920 -151H4V0H920V-151Z" />
+<glyph unicode="`" horiz-adv-x="633" d="M474 1240H315L57 1534H280L474 1240Z" />
+<glyph unicode="a" horiz-adv-x="1114" d="M808 0Q792 32 782 114Q653 -20 474 -20Q314 -20 212 70T109 300Q109 469 237 562T599 656H779V741Q779 838 721 895T550 953Q451 953 384 903T317 782H131Q131 863 188 938T344 1058T561 1102Q748 1102 854 1009T964
+751V253Q964 104 1002 16V0H808ZM501 141Q588 141 666 186T779 303V525H634Q294 525 294 326Q294 239 352 190T501 141Z" />
+<glyph unicode="b" horiz-adv-x="1149" d="M1056 529Q1056 281 942 131T636 -20Q431 -20 319 125L310 0H140V1536H325V963Q437 1102 634 1102T943 953T1056 545V529ZM871 550Q871 739 798 842T588 945Q405 945 325 775V307Q410 137 590 137Q723 137 797 240T871 550Z" />
+<glyph unicode="c" horiz-adv-x="1072" d="M574 131Q673 131 747 191T829 341H1004Q999 248 940 164T783 30T574 -20Q353 -20 223 127T92 531V562Q92 720 150 843T316 1034T573 1102Q755 1102 875 993T1004 710H829Q821 815 750 882T573 950Q432 950 355 849T277
+555V520Q277 333 354 232T574 131Z" />
+<glyph unicode="d" horiz-adv-x="1155" d="M95 550Q95 799 213 950T522 1102Q712 1102 823 972V1536H1008V0H838L829 116Q718 -20 520 -20Q332 -20 214 134T95 536V550ZM280 529Q280 345 356 241T566 137Q742 137 823 295V792Q740 945 568 945Q432 945 356 840T280 529Z" />
+<glyph unicode="e" horiz-adv-x="1085" d="M589 -20Q369 -20 231 124T93 511V545Q93 706 154 832T326 1030T566 1102Q777 1102 894 963T1011 565V488H278Q282 328 371 230T599 131Q697 131 765 171T884 277L997 189Q861 -20 589 -20ZM566 950Q454 950 378 869T284
+640H826V654Q818 795 750 872T566 950Z" />
+<glyph unicode="f" horiz-adv-x="711" d="M231 0V939H60V1082H231V1193Q231 1367 324 1462T587 1557Q651 1557 714 1540L704 1390Q657 1399 604 1399Q514 1399 465 1347T416 1196V1082H647V939H416V0H231Z" />
+<glyph unicode="g" horiz-adv-x="1149" d="M96 550Q96 803 213 952T523 1102Q721 1102 832 962L841 1082H1010V26Q1010 -184 886 -305T551 -426Q434 -426 322 -376T151 -239L247 -128Q366 -275 538 -275Q673 -275 748 -199T824 15V108Q713 -20 521 -20Q331 -20
+214 133T96 550ZM282 529Q282 346 357 242T567 137Q742 137 824 296V790Q739 945 569 945Q434 945 358 840T282 529Z" />
+<glyph unicode="h" horiz-adv-x="1128" d="M325 951Q448 1102 645 1102Q988 1102 991 715V0H806V716Q805 833 753 889T589 945Q499 945 431 897T325 771V0H140V1536H325V951Z" />
+<glyph unicode="i" horiz-adv-x="497" d="M341 0H156V1082H341V0ZM141 1369Q141 1414 168 1445T250 1476T332 1445T360 1369T332 1294T250 1264T169 1294T141 1369Z" />
+<glyph unicode="j" horiz-adv-x="489" d="M331 1082V-125Q331 -437 48 -437Q-13 -437 -65 -419V-271Q-33 -279 19 -279Q81 -279 113 -246T146 -129V1082H331ZM127 1369Q127 1413 154 1444T235 1476Q289 1476 317 1445T345 1369T317 1294T235 1264T154 1294T127 1369Z" />
+<glyph unicode="k" horiz-adv-x="1038" d="M442 501L326 380V0H141V1536H326V607L425 726L762 1082H987L566 630L1036 0H819L442 501Z" />
+<glyph unicode="l" horiz-adv-x="497" d="M341 0H156V1536H341V0Z" />
+<glyph unicode="m" horiz-adv-x="1795" d="M314 1082L319 962Q438 1102 640 1102Q867 1102 949 928Q1003 1006 1089 1054T1294 1102Q1650 1102 1656 725V0H1471V714Q1471 830 1418 887T1240 945Q1137 945 1069 884T990 718V0H804V709Q804 945 573 945Q391 945
+324 790V0H139V1082H314Z" />
+<glyph unicode="n" horiz-adv-x="1130" d="M315 1082L321 946Q445 1102 645 1102Q988 1102 991 715V0H806V716Q805 833 753 889T589 945Q499 945 431 897T325 771V0H140V1082H315Z" />
+<glyph unicode="o" horiz-adv-x="1168" d="M91 551Q91 710 153 837T327 1033T582 1102Q803 1102 939 949T1076 542V529Q1076 371 1016 246T843 50T584 -20Q364 -20 228 133T91 538V551ZM277 529Q277 349 360 240T584 131Q725 131 808 241T891 551Q891 729 807
+839T582 950Q445 950 361 841T277 529Z" />
+<glyph unicode="p" horiz-adv-x="1149" d="M1054 529Q1054 282 941 131T635 -20Q438 -20 325 105V-416H140V1082H309L318 962Q431 1102 632 1102Q827 1102 940 955T1054 546V529ZM869 550Q869 733 791 839T577 945Q409 945 325 796V279Q408 131 579 131Q712 131
+790 236T869 550Z" />
+<glyph unicode="q" horiz-adv-x="1164" d="M95 550Q95 805 212 953T526 1102Q718 1102 829 973L837 1082H1007V-416H822V100Q710 -20 524 -20Q328 -20 212 132T95 537V550ZM280 529Q280 343 358 237T570 131Q735 131 822 277V807Q734 950 572 950Q438 950 359
+844T280 529Z" />
+<glyph unicode="r" horiz-adv-x="693" d="M663 916Q621 923 572 923Q390 923 325 768V0H140V1082H320L323 957Q414 1102 581 1102Q635 1102 663 1088V916Z" />
+<glyph unicode="s" horiz-adv-x="1056" d="M770 287Q770 362 714 403T517 475T294 547T172 647T132 785Q132 918 244 1010T532 1102Q716 1102 830 1007T945 764H759Q759 840 695 895T532 950Q431 950 374 906T317 791Q317 724 370 690T561 625T786 551T913 448T955
+300Q955 155 839 68T538 -20Q408 -20 308 26T152 154T95 333H280Q285 240 354 186T538 131Q643 131 706 173T770 287Z" />
+<glyph unicode="t" horiz-adv-x="669" d="M391 1344V1082H593V939H391V268Q391 203 418 171T510 138Q542 138 598 150V0Q525 -20 456 -20Q332 -20 269 55T206 268V939H9V1082H206V1344H391Z" />
+<glyph unicode="u" horiz-adv-x="1129" d="M808 107Q700 -20 491 -20Q318 -20 228 80T136 378V1082H321V383Q321 137 521 137Q733 137 803 295V1082H988V0H812L808 107Z" />
+<glyph unicode="v" horiz-adv-x="992" d="M497 251L765 1082H954L566 0H425L33 1082H222L497 251Z" />
+<glyph unicode="w" horiz-adv-x="1539" d="M1098 255L1306 1082H1491L1176 0H1026L763 820L507 0H357L43 1082H227L440 272L692 1082H841L1098 255Z" />
+<glyph unicode="x" horiz-adv-x="1015" d="M503 687L743 1082H959L605 547L970 0H756L506 405L256 0H41L406 547L52 1082H266L503 687Z" />
+<glyph unicode="y" horiz-adv-x="969" d="M494 271L746 1082H944L509 -167Q408 -437 188 -437L153 -434L84 -421V-271L134 -275Q228 -275 280 -237T367 -98L408 12L22 1082H224L494 271Z" />
+<glyph unicode="z" horiz-adv-x="1015" d="M314 151H947V0H88V136L685 929H97V1082H917V951L314 151Z" />
+<glyph unicode="{" horiz-adv-x="693" d="M632 -366Q455 -316 366 -202T276 101V300Q276 543 64 543V688Q276 688 276 930V1138Q278 1321 365 1433T632 1597L670 1482Q461 1415 461 1133V931Q461 704 294 615Q461 525 461 296V90Q464 -185 670 -251L632 -366Z" />
+<glyph unicode="|" horiz-adv-x="499" d="M324 -270H175V1456H324V-270Z" />
+<glyph unicode="}" horiz-adv-x="693" d="M19 -251Q222 -186 229 80V300Q229 531 410 615Q229 697 229 930V1133Q229 1415 20 1482L58 1597Q235 1547 324 1435T414 1137V927Q414 688 626 688V543Q414 543 414 300V98Q414 -90 324 -203T58 -366L19 -251Z" />
+<glyph unicode="~" horiz-adv-x="1393" d="M1263 777Q1263 619 1170 511T939 402Q867 402 803 428T655 529T533 621T454 639Q376 639 334 586T292 438L131 436Q131 596 223 699T454 802Q530 802 600 770T758 658T910 567L939 565Q1015 565 1062 623T1110 776L1263 777Z" />
+<glyph unicode="&#xa0;" horiz-adv-x="507" />
+<glyph unicode="&#xa1;" horiz-adv-x="499" d="M170 684H338L351 -360H157L170 684ZM358 996Q358 951 331 920T249 889T167 920T139 996T167 1071T249 1101T330 1071T358 996Z" />
+<glyph unicode="&#xa2;" horiz-adv-x="1120" d="M586 131Q686 131 760 191T842 341H1017Q1011 215 912 115T669 -12V-245H484V-11Q305 23 205 165T105 527V562Q105 774 206 916T484 1092V1318H669V1095Q819 1072 915 966T1017 710H842Q834 815 763 882T586 950Q445
+950 368 849T290 555V520Q290 333 367 232T586 131Z" />
+<glyph unicode="&#xa3;" horiz-adv-x="1190" d="M449 622L457 402Q457 248 395 157H1128L1127 0H95V157H172Q212 166 237 231T264 393V401L256 622H91V779H251L242 1039Q242 1238 364 1357T687 1476Q877 1476 988 1370T1099 1087H908Q908 1194 845 1256T670 1318Q565
+1318 500 1241T435 1039L444 779H763V622H449Z" />
+<glyph unicode="&#xa4;" horiz-adv-x="1460" d="M1103 112Q944 -20 735 -20Q528 -20 369 110L235 -26L105 109L244 250Q140 406 140 608Q140 814 252 977L105 1128L235 1264L382 1114Q540 1234 735 1234Q931 1234 1090 1113L1239 1265L1371 1128L1220 974Q1330
+811 1330 608Q1330 412 1228 253L1371 109L1239 -27L1103 112ZM311 608Q311 485 368 379T524 212T735 151T946 212T1100 379T1157 608Q1157 730 1101 835T946 1001T735 1062Q622 1062 524 1002T369 836T311 608Z" />
+<glyph unicode="&#xa5;" horiz-adv-x="1240" d="M614 782L978 1456H1197L779 736H1091V611H707V446H1091V322H707V0H514V322H136V446H514V611H136V736H449L31 1456H251L614 782Z" />
+<glyph unicode="&#xa6;" horiz-adv-x="491" d="M147 -270V521H333V-270H147ZM333 698H147V1456H333V698Z" />
+<glyph unicode="&#xa7;" horiz-adv-x="1256" d="M1145 431Q1145 242 959 157Q1028 108 1064 40T1100 -128Q1100 -296 970 -395T612 -495Q500 -495 400 -467T229 -382Q90 -269 90 -64L276 -62Q276 -192 366 -267T612 -343Q748 -343 831 -285T914 -130Q914 -41 843
+11T563 126Q381 174 285 229T143 362T96 551Q96 737 278 825Q212 874 177 942T141 1110Q141 1276 274 1376T630 1476Q862 1476 992 1363T1122 1045H937Q937 1170 853 1247T630 1325Q488 1325 408 1268T327 1112Q327 1043 355 1003T450 931T661 858T889 782T1030
+698T1116 585T1145 431ZM602 691Q512 715 437 742Q357 723 320 673T282 553Q282 483 309 443T402 370T611 296T797 238Q875 258 917 308T959 428Q959 516 890 570T602 691Z" />
+<glyph unicode="&#xa8;" horiz-adv-x="856" d="M102 1371Q102 1416 129 1446T211 1477T293 1447T321 1371T293 1296T211 1266T130 1296T102 1371ZM532 1369Q532 1414 559 1445T641 1476T723 1445T751 1369T723 1294T641 1264T560 1294T532 1369Z" />
+<glyph unicode="&#xa9;" horiz-adv-x="1609" d="M1119 597Q1119 444 1033 364T788 283Q631 283 537 388T442 676V786Q442 962 537 1067T788 1173Q948 1173 1034 1091T1120 860H974Q974 959 927 1001T788 1044Q694 1044 640 975T586 783V670Q586 550 640 481T788
+412Q880 412 926 454T973 597H1119ZM206 729Q206 557 286 411T503 181T801 98T1098 181T1315 410T1395 729Q1395 899 1316 1044T1100 1272T801 1356Q641 1356 503 1274T286 1045T206 729ZM91 729Q91 931 184 1104T443 1376T801 1476T1158 1377T1416 1104T1510 729Q1510
+532 1420 360T1165 84T801 -21Q604 -21 439 82T182 358T91 729Z" />
+<glyph unicode="&#xaa;" horiz-adv-x="915" d="M618 705Q606 739 600 777Q524 691 396 691Q277 691 212 753T147 918Q147 1029 230 1089T486 1149H594V1201Q594 1336 470 1336Q401 1336 362 1309T322 1231L161 1243Q161 1346 247 1411T470 1476Q603 1476 680 1405T757
+1199V883Q757 786 783 705H618ZM435 828Q478 828 522 848T594 895V1037H482Q399 1036 355 1005T310 922Q310 828 435 828Z" />
+<glyph unicode="&#xab;" horiz-adv-x="961" d="M536 804L794 407H653L358 795V814L653 1203H794L536 804ZM610 548L868 151H727L432 539V558L727 947H868L610 548Z" />
+<glyph unicode="&#xac;" horiz-adv-x="1134" d="M958 375H772V639H127V800H958V375Z" />
+<glyph unicode="&#xad;" horiz-adv-x="565" d="M525 543H37V694H525V543Z" />
+<glyph unicode="&#xae;" horiz-adv-x="1610" d="M90 729Q90 931 183 1104T442 1376T800 1476T1157 1377T1415 1104T1509 729Q1509 532 1419 360T1164 84T800 -21Q603 -21 438 82T181 358T90 729ZM205 729Q205 557 285 411T502 181T800 98Q961 98 1099 182T1315
+412T1394 729Q1394 900 1316 1044T1099 1272T800 1356Q640 1356 502 1274T285 1045T205 729ZM653 654V316H512V1165H788Q941 1165 1025 1100T1110 909Q1110 786 982 721Q1104 671 1105 517V456Q1105 370 1122 332V316H977Q963 352 963 444T960 554Q944 650 829
+654H653ZM653 782H809Q881 784 925 817T969 904Q969 977 930 1007T791 1038H653V782Z" />
+<glyph unicode="&#xaf;" horiz-adv-x="938" d="M834 1313H120V1456H834V1313Z" />
+<glyph unicode="&#xb0;" horiz-adv-x="765" d="M130 1216Q130 1320 204 1398T385 1476Q489 1476 562 1399T636 1216Q636 1110 563 1035T385 960Q280 960 205 1035T130 1216ZM385 1088Q439 1088 476 1123T513 1216Q513 1274 476 1311T385 1349Q330 1349 293 1310T255
+1216T292 1125T385 1088Z" />
+<glyph unicode="&#xb1;" horiz-adv-x="1094" d="M649 854H1013V703H649V289H482V703H97V854H482V1267H649V854ZM970 0H135V152H970V0Z" />
+<glyph unicode="&#xb2;" horiz-adv-x="751" d="M683 667H84V775L384 1057Q493 1159 493 1228Q493 1277 461 1307T369 1338Q294 1338 259 1300T223 1205H66Q66 1319 149 1393T365 1467T574 1404T651 1230Q651 1126 544 1019L460 940L284 795H683V667Z" />
+<glyph unicode="&#xb3;" horiz-adv-x="751" d="M265 1125H349Q423 1125 459 1155T495 1234Q495 1279 464 1308T362 1337Q305 1337 268 1312T230 1245H73Q73 1343 154 1404T360 1466Q497 1466 575 1406T653 1241Q653 1186 618 1141T517 1070Q666 1029 666 886Q666
+780 581 718T360 655Q228 655 145 718T62 888H220Q220 843 259 813T366 783Q436 783 472 813T509 894Q509 1007 353 1009H265V1125Z" />
+<glyph unicode="&#xb4;" horiz-adv-x="642" d="M316 1534H540L272 1240H123L316 1534Z" />
+<glyph unicode="&#xb5;" horiz-adv-x="1160" d="M339 1082V449Q340 286 391 208T559 130Q758 130 820 282V1082H1006V0H839L830 115Q737 -20 567 -20Q420 -20 339 53V-416H154V1082H339Z" />
+<glyph unicode="&#xb6;" horiz-adv-x="1001" d="M646 0V520H562Q332 520 200 647T67 988Q67 1201 200 1328T563 1456H832V0H646Z" />
+<glyph unicode="&#xb7;" horiz-adv-x="534" d="M147 729Q147 777 175 809T261 841T347 809T377 729Q377 682 348 651T261 619T176 650T147 729Z" />
+<glyph unicode="&#xb8;" horiz-adv-x="507" d="M285 0L273 -52Q426 -79 426 -225Q426 -322 346 -378T123 -435L116 -328Q195 -328 238 -302T282 -229Q282 -185 250 -164T120 -134L152 0H285Z" />
+<glyph unicode="&#xb9;" horiz-adv-x="751" d="M495 674H338V1275L122 1218V1346L477 1463H495V674Z" />
+<glyph unicode="&#xba;" horiz-adv-x="931" d="M122 1123Q122 1281 216 1378T464 1476Q619 1476 713 1380T807 1117V1043Q807 884 714 787T466 690T217 787T122 1049V1123ZM285 1043Q285 943 333 886T466 829Q549 829 596 886T644 1045V1123Q644 1222 596 1279T464
+1336Q383 1336 335 1281T285 1129V1043Z" />
+<glyph unicode="&#xbb;" horiz-adv-x="960" d="M244 949L539 560V541L244 152H102L360 550L102 949H244ZM593 949L888 560V541L593 152H451L709 550L451 949H593Z" />
+<glyph unicode="&#xbc;" horiz-adv-x="1500" d="M458 664H301V1265L85 1208V1336L440 1453H458V664ZM443 118L339 184L1050 1322L1154 1256L443 118ZM1318 299H1425V169H1318V0H1161V169H786L780 271L1157 789H1318V299ZM938 299H1161V588L1144 560L938 299Z" />
+<glyph unicode="&#xbd;" horiz-adv-x="1589" d="M399 118L295 184L1006 1322L1110 1256L399 118ZM453 664H296V1265L80 1208V1336L435 1453H453V664ZM1481 0H882V108L1182 390Q1291 492 1291 561Q1291 610 1259 640T1167 671Q1092 671 1057 633T1021 538H864Q864
+652 947 726T1163 800T1372 737T1449 563Q1449 459 1342 352L1258 273L1082 128H1481V0Z" />
+<glyph unicode="&#xbe;" horiz-adv-x="1593" d="M570 118L466 184L1177 1322L1281 1256L570 118ZM1410 299H1517V169H1410V0H1253V169H878L872 271L1249 789H1410V299ZM1030 299H1253V588L1236 560L1030 299ZM314 1126H398Q472 1126 508 1156T544 1235Q544 1280
+513 1309T411 1338Q354 1338 317 1313T279 1246H122Q122 1344 203 1405T409 1467Q546 1467 624 1407T702 1242Q702 1187 667 1142T566 1071Q715 1030 715 887Q715 781 630 719T409 656Q277 656 194 719T111 889H269Q269 844 308 814T415 784Q485 784 521 814T558
+895Q558 1008 402 1010H314V1126Z" />
+<glyph unicode="&#xbf;" horiz-adv-x="969" d="M588 680Q587 574 567 511T498 388T358 233T255 37L253 0Q253 -109 311 -166T478 -224Q578 -224 640 -168T703 -20H888Q886 -181 774 -283T478 -385Q282 -385 175 -285T68 -5Q68 168 228 343L337 456Q403 534 403
+680H588ZM596 997Q596 952 569 921T487 890T405 921T377 997Q377 1041 405 1071T487 1101T568 1071T596 997Z" />
+<glyph unicode="&#xc0;" horiz-adv-x="1336" d="M973 380H363L226 0H28L584 1456H752L1309 0H1112L973 380ZM421 538H916L668 1219L421 538ZM778 1550H619L361 1844H584L778 1550Z" />
+<glyph unicode="&#xc1;" horiz-adv-x="1336" d="M973 380H363L226 0H28L584 1456H752L1309 0H1112L973 380ZM421 538H916L668 1219L421 538ZM763 1844H987L719 1550H570L763 1844Z" />
+<glyph unicode="&#xc2;" horiz-adv-x="1336" d="M973 380H363L226 0H28L584 1456H752L1309 0H1112L973 380ZM421 538H916L668 1219L421 538ZM975 1572V1562H822L672 1732L523 1562H370V1574L616 1846H728L975 1572Z" />
+<glyph unicode="&#xc3;" horiz-adv-x="1336" d="M973 380H363L226 0H28L584 1456H752L1309 0H1112L973 380ZM421 538H916L668 1219L421 538ZM1027 1814Q1027 1706 966 1639T812 1572Q771 1572 741 1582T663 1623T593 1660T543 1667Q502 1667 473 1636T444 1555L320
+1562Q320 1669 380 1739T534 1809Q569 1809 597 1799T673 1760T746 1722T803 1713Q846 1713 874 1747T903 1826L1027 1814Z" />
+<glyph unicode="&#xc4;" horiz-adv-x="1336" d="M973 380H363L226 0H28L584 1456H752L1309 0H1112L973 380ZM421 538H916L668 1219L421 538ZM351 1681Q351 1726 378 1756T460 1787T542 1757T570 1681T542 1606T460 1576T379 1606T351 1681ZM781 1679Q781 1724
+808 1755T890 1786T972 1755T1000 1679T972 1604T890 1574T809 1604T781 1679Z" />
+<glyph unicode="&#xc5;" horiz-adv-x="1336" d="M973 380H363L226 0H28L584 1456H752L1309 0H1112L973 380ZM421 538H916L668 1219L421 538ZM887 1729Q887 1642 825 1584T672 1525Q580 1525 519 1584T457 1729T518 1876T672 1937T825 1876T887 1729ZM556 1729Q556
+1682 589 1648T672 1614Q720 1614 754 1647T788 1729T755 1812T672 1847Q622 1847 589 1812T556 1729Z" />
+<glyph unicode="&#xc6;" horiz-adv-x="1914" d="M1879 0H996L981 353H417L212 0H-14L866 1456H1817V1304H1126L1146 833H1736V682H1152L1174 151H1879V0ZM518 527H974L943 1260L518 527Z" />
+<glyph unicode="&#xc7;" horiz-adv-x="1333" d="M1240 462Q1213 231 1070 106T688 -20Q430 -20 275 165T119 660V800Q119 1003 191 1157T397 1393T705 1476Q937 1476 1077 1347T1240 988H1047Q1022 1162 939 1240T705 1318Q521 1318 417 1182T312 795V654Q312
+417 411 277T688 137Q848 137 933 209T1047 462H1240ZM751 -9L739 -61Q892 -88 892 -234Q892 -331 812 -387T589 -444L582 -337Q661 -337 704 -311T748 -238Q748 -194 716 -173T586 -143L618 -9H751Z" />
+<glyph unicode="&#xc8;" horiz-adv-x="1164" d="M992 673H361V157H1094V0H169V1456H1084V1298H361V830H992V673ZM725 1562H566L308 1856H531L725 1562Z" />
+<glyph unicode="&#xc9;" horiz-adv-x="1164" d="M992 673H361V157H1094V0H169V1456H1084V1298H361V830H992V673ZM710 1856H934L666 1562H517L710 1856Z" />
+<glyph unicode="&#xca;" horiz-adv-x="1164" d="M992 673H361V157H1094V0H169V1456H1084V1298H361V830H992V673ZM922 1584V1574H769L619 1744L470 1574H317V1586L563 1858H675L922 1584Z" />
+<glyph unicode="&#xcb;" horiz-adv-x="1164" d="M992 673H361V157H1094V0H169V1456H1084V1298H361V830H992V673ZM298 1693Q298 1738 325 1768T407 1799T489 1769T517 1693T489 1618T407 1588T326 1618T298 1693ZM728 1691Q728 1736 755 1767T837 1798T919 1767T947
+1691T919 1616T837 1586T756 1616T728 1691Z" />
+<glyph unicode="&#xcc;" horiz-adv-x="557" d="M375 0H183V1456H375V0ZM385 1562H226L-32 1856H191L385 1562Z" />
+<glyph unicode="&#xcd;" horiz-adv-x="557" d="M375 0H183V1456H375V0ZM369 1856H593L325 1562H176L369 1856Z" />
+<glyph unicode="&#xce;" horiz-adv-x="557" d="M375 0H183V1456H375V0ZM582 1584V1574H429L279 1744L130 1574H-23V1586L223 1858H335L582 1584Z" />
+<glyph unicode="&#xcf;" horiz-adv-x="557" d="M375 0H183V1456H375V0ZM-42 1693Q-42 1738 -15 1768T67 1799T149 1769T177 1693T149 1618T67 1588T-14 1618T-42 1693ZM388 1691Q388 1736 415 1767T497 1798T579 1767T607 1691T579 1616T497 1586T416 1616T388 1691Z" />
+<glyph unicode="&#xd0;" horiz-adv-x="1373" d="M199 0V666H37V817H199V1456H610Q800 1456 946 1372T1171 1133T1252 777V684Q1252 478 1173 323T946 85T602 0H199ZM673 666H391V157H592Q814 157 937 294T1062 680V773Q1062 1021 946 1158T615 1298H391V817H673V666Z" />
+<glyph unicode="&#xd1;" horiz-adv-x="1460" d="M1288 0H1095L362 1122V0H169V1456H362L1097 329V1456H1288V0ZM1081 1814Q1081 1706 1020 1639T866 1572Q825 1572 795 1582T717 1623T647 1660T597 1667Q556 1667 527 1636T498 1555L374 1562Q374 1669 434 1739T588
+1809Q623 1809 651 1799T727 1760T800 1722T857 1713Q900 1713 928 1747T957 1826L1081 1814Z" />
+<glyph unicode="&#xd2;" horiz-adv-x="1408" d="M1289 681Q1289 467 1217 308T1013 64T705 -20Q533 -20 400 64T194 305T118 668V773Q118 983 191 1144T397 1390T703 1476Q878 1476 1011 1392T1217 1147T1289 773V681ZM1098 775Q1098 1034 994 1172T703 1311Q521
+1311 417 1173T309 788V681Q309 430 414 287T705 143Q891 143 993 278T1098 667V775ZM812 1552H653L395 1846H618L812 1552Z" />
+<glyph unicode="&#xd3;" horiz-adv-x="1408" d="M1289 681Q1289 467 1217 308T1013 64T705 -20Q533 -20 400 64T194 305T118 668V773Q118 983 191 1144T397 1390T703 1476Q878 1476 1011 1392T1217 1147T1289 773V681ZM1098 775Q1098 1034 994 1172T703 1311Q521
+1311 417 1173T309 788V681Q309 430 414 287T705 143Q891 143 993 278T1098 667V775ZM797 1846H1021L753 1552H604L797 1846Z" />
+<glyph unicode="&#xd4;" horiz-adv-x="1408" d="M1289 681Q1289 467 1217 308T1013 64T705 -20Q533 -20 400 64T194 305T118 668V773Q118 983 191 1144T397 1390T703 1476Q878 1476 1011 1392T1217 1147T1289 773V681ZM1098 775Q1098 1034 994 1172T703 1311Q521
+1311 417 1173T309 788V681Q309 430 414 287T705 143Q891 143 993 278T1098 667V775ZM1009 1574V1564H856L706 1734L557 1564H404V1576L650 1848H762L1009 1574Z" />
+<glyph unicode="&#xd5;" horiz-adv-x="1408" d="M1289 681Q1289 467 1217 308T1013 64T705 -20Q533 -20 400 64T194 305T118 668V773Q118 983 191 1144T397 1390T703 1476Q878 1476 1011 1392T1217 1147T1289 773V681ZM1098 775Q1098 1034 994 1172T703 1311Q521
+1311 417 1173T309 788V681Q309 430 414 287T705 143Q891 143 993 278T1098 667V775ZM1061 1816Q1061 1708 1000 1641T846 1574Q805 1574 775 1584T697 1625T627 1662T577 1669Q536 1669 507 1638T478 1557L354 1564Q354 1671 414 1741T568 1811Q603 1811 631 1801T707
+1762T780 1724T837 1715Q880 1715 908 1749T937 1828L1061 1816Z" />
+<glyph unicode="&#xd6;" horiz-adv-x="1408" d="M1289 681Q1289 467 1217 308T1013 64T705 -20Q533 -20 400 64T194 305T118 668V773Q118 983 191 1144T397 1390T703 1476Q878 1476 1011 1392T1217 1147T1289 773V681ZM1098 775Q1098 1034 994 1172T703 1311Q521
+1311 417 1173T309 788V681Q309 430 414 287T705 143Q891 143 993 278T1098 667V775ZM385 1683Q385 1728 412 1758T494 1789T576 1759T604 1683T576 1608T494 1578T413 1608T385 1683ZM815 1681Q815 1726 842 1757T924 1788T1006 1757T1034 1681T1006 1606T924
+1576T843 1606T815 1681Z" />
+<glyph unicode="&#xd7;" horiz-adv-x="1092" d="M89 329L419 665L91 1000L210 1123L539 788L868 1123L987 1000L659 665L989 329L870 206L539 543L208 206L89 329Z" />
+<glyph unicode="&#xd8;" horiz-adv-x="1408" d="M1289 681Q1289 467 1217 308T1013 64T705 -20Q534 -20 403 62L306 -93H164L308 138Q118 330 118 690V773Q118 983 191 1144T397 1390T703 1476Q917 1476 1065 1351L1168 1516H1309L1150 1261Q1287 1074 1289 780V681ZM309
+681Q309 437 407 296L971 1200Q869 1311 703 1311Q521 1311 417 1173T309 788V681ZM1098 775Q1098 957 1042 1088L493 207Q584 143 705 143Q891 143 993 278T1098 667V775Z" />
+<glyph unicode="&#xd9;" horiz-adv-x="1328" d="M1194 1456V466Q1193 260 1065 129T716 -18L665 -20Q426 -20 284 109T140 464V1456H330V470Q330 312 417 225T665 137Q828 137 914 224T1001 469V1456H1194ZM773 1550H614L356 1844H579L773 1550Z" />
+<glyph unicode="&#xda;" horiz-adv-x="1328" d="M1194 1456V466Q1193 260 1065 129T716 -18L665 -20Q426 -20 284 109T140 464V1456H330V470Q330 312 417 225T665 137Q828 137 914 224T1001 469V1456H1194ZM758 1844H982L714 1550H565L758 1844Z" />
+<glyph unicode="&#xdb;" horiz-adv-x="1328" d="M1194 1456V466Q1193 260 1065 129T716 -18L665 -20Q426 -20 284 109T140 464V1456H330V470Q330 312 417 225T665 137Q828 137 914 224T1001 469V1456H1194ZM970 1572V1562H817L667 1732L518 1562H365V1574L611
+1846H723L970 1572Z" />
+<glyph unicode="&#xdc;" horiz-adv-x="1328" d="M1194 1456V466Q1193 260 1065 129T716 -18L665 -20Q426 -20 284 109T140 464V1456H330V470Q330 312 417 225T665 137Q828 137 914 224T1001 469V1456H1194ZM346 1681Q346 1726 373 1756T455 1787T537 1757T565
+1681T537 1606T455 1576T374 1606T346 1681ZM776 1679Q776 1724 803 1755T885 1786T967 1755T995 1679T967 1604T885 1574T804 1604T776 1679Z" />
+<glyph unicode="&#xdd;" horiz-adv-x="1230" d="M613 725L993 1456H1211L709 543V0H517V543L15 1456H235L613 725ZM708 1844H932L664 1550H515L708 1844Z" />
+<glyph unicode="&#xde;" horiz-adv-x="1210" d="M352 1456V1163H631Q778 1163 888 1111T1057 961T1117 738Q1117 544 985 429T626 313H352V0H166V1456H352ZM352 1011V465H629Q771 465 851 540T931 736Q931 859 851 934T635 1011H352Z" />
+<glyph unicode="&#xdf;" horiz-adv-x="1218" d="M324 0H139V1111Q139 1319 242 1436T532 1554Q712 1554 810 1465T909 1216Q909 1091 845 990T781 819Q781 768 818 721T950 601T1087 461T1130 317Q1130 158 1029 69T745 -20Q664 -20 574 2T445 52L488 207Q537
+175 604 153T725 131Q832 131 888 178T945 307Q945 359 908 407T777 528T639 671T595 821Q595 910 664 1013T734 1201Q734 1295 682 1348T542 1402Q324 1402 324 1109V0Z" />
+<glyph unicode="&#xe0;" horiz-adv-x="1114" d="M808 0Q792 32 782 114Q653 -20 474 -20Q314 -20 212 70T109 300Q109 469 237 562T599 656H779V741Q779 838 721 895T550 953Q451 953 384 903T317 782H131Q131 863 188 938T344 1058T561 1102Q748 1102 854 1009T964
+751V253Q964 104 1002 16V0H808ZM501 141Q588 141 666 186T779 303V525H634Q294 525 294 326Q294 239 352 190T501 141ZM687 1240H528L270 1534H493L687 1240Z" />
+<glyph unicode="&#xe1;" horiz-adv-x="1114" d="M808 0Q792 32 782 114Q653 -20 474 -20Q314 -20 212 70T109 300Q109 469 237 562T599 656H779V741Q779 838 721 895T550 953Q451 953 384 903T317 782H131Q131 863 188 938T344 1058T561 1102Q748 1102 854 1009T964
+751V253Q964 104 1002 16V0H808ZM501 141Q588 141 666 186T779 303V525H634Q294 525 294 326Q294 239 352 190T501 141ZM672 1534H896L628 1240H479L672 1534Z" />
+<glyph unicode="&#xe2;" horiz-adv-x="1114" d="M808 0Q792 32 782 114Q653 -20 474 -20Q314 -20 212 70T109 300Q109 469 237 562T599 656H779V741Q779 838 721 895T550 953Q451 953 384 903T317 782H131Q131 863 188 938T344 1058T561 1102Q748 1102 854 1009T964
+751V253Q964 104 1002 16V0H808ZM501 141Q588 141 666 186T779 303V525H634Q294 525 294 326Q294 239 352 190T501 141ZM884 1262V1252H731L581 1422L432 1252H279V1264L525 1536H637L884 1262Z" />
+<glyph unicode="&#xe3;" horiz-adv-x="1114" d="M808 0Q792 32 782 114Q653 -20 474 -20Q314 -20 212 70T109 300Q109 469 237 562T599 656H779V741Q779 838 721 895T550 953Q451 953 384 903T317 782H131Q131 863 188 938T344 1058T561 1102Q748 1102 854 1009T964
+751V253Q964 104 1002 16V0H808ZM501 141Q588 141 666 186T779 303V525H634Q294 525 294 326Q294 239 352 190T501 141ZM936 1504Q936 1396 875 1329T721 1262Q680 1262 650 1272T572 1313T502 1350T452 1357Q411 1357 382 1326T353 1245L229 1252Q229 1359 289
+1429T443 1499Q478 1499 506 1489T582 1450T655 1412T712 1403Q755 1403 783 1437T812 1516L936 1504Z" />
+<glyph unicode="&#xe4;" horiz-adv-x="1114" d="M808 0Q792 32 782 114Q653 -20 474 -20Q314 -20 212 70T109 300Q109 469 237 562T599 656H779V741Q779 838 721 895T550 953Q451 953 384 903T317 782H131Q131 863 188 938T344 1058T561 1102Q748 1102 854 1009T964
+751V253Q964 104 1002 16V0H808ZM501 141Q588 141 666 186T779 303V525H634Q294 525 294 326Q294 239 352 190T501 141ZM260 1371Q260 1416 287 1446T369 1477T451 1447T479 1371T451 1296T369 1266T288 1296T260 1371ZM690 1369Q690 1414 717 1445T799 1476T881
+1445T909 1369T881 1294T799 1264T718 1294T690 1369Z" />
+<glyph unicode="&#xe5;" horiz-adv-x="1114" d="M808 0Q792 32 782 114Q653 -20 474 -20Q314 -20 212 70T109 300Q109 469 237 562T599 656H779V741Q779 838 721 895T550 953Q451 953 384 903T317 782H131Q131 863 188 938T344 1058T561 1102Q748 1102 854 1009T964
+751V253Q964 104 1002 16V0H808ZM501 141Q588 141 666 186T779 303V525H634Q294 525 294 326Q294 239 352 190T501 141ZM796 1419Q796 1332 734 1274T581 1215Q489 1215 428 1274T366 1419T427 1566T581 1627T734 1566T796 1419ZM465 1419Q465 1372 498 1338T581
+1304Q629 1304 663 1337T697 1419T664 1502T581 1537Q531 1537 498 1502T465 1419Z" />
+<glyph unicode="&#xe6;" horiz-adv-x="1729" d="M1262 -20Q1001 -20 865 160Q800 74 687 27T433 -20Q266 -20 172 66T78 304Q78 461 191 548T526 635H749V720Q749 827 694 888T535 950Q430 950 360 895T290 759L106 778Q106 921 227 1011T535 1102Q650 1102 738
+1061T876 936Q939 1015 1026 1058T1218 1102Q1428 1102 1544 974T1660 612V497H932Q939 321 1026 226T1262 130Q1410 130 1531 206L1578 237L1642 101Q1484 -20 1262 -20ZM469 130Q541 130 620 167T749 258V495H521Q404 493 334 438T264 300Q264 223 317 177T469
+130ZM1218 950Q1103 950 1029 865T937 640H1475V671Q1475 803 1408 876T1218 950Z" />
+<glyph unicode="&#xe7;" horiz-adv-x="1072" d="M574 131Q673 131 747 191T829 341H1004Q999 248 940 164T783 30T574 -20Q353 -20 223 127T92 531V562Q92 720 150 843T316 1034T573 1102Q755 1102 875 993T1004 710H829Q821 815 750 882T573 950Q432 950 355
+849T277 555V520Q277 333 354 232T574 131ZM604 -9L592 -61Q745 -88 745 -234Q745 -331 665 -387T442 -444L435 -337Q514 -337 557 -311T601 -238Q601 -194 569 -173T439 -143L471 -9H604Z" />
+<glyph unicode="&#xe8;" horiz-adv-x="1085" d="M589 -20Q369 -20 231 124T93 511V545Q93 706 154 832T326 1030T566 1102Q777 1102 894 963T1011 565V488H278Q282 328 371 230T599 131Q697 131 765 171T884 277L997 189Q861 -20 589 -20ZM566 950Q454 950 378
+869T284 640H826V654Q818 795 750 872T566 950ZM671 1240H512L254 1534H477L671 1240Z" />
+<glyph unicode="&#xe9;" horiz-adv-x="1085" d="M589 -20Q369 -20 231 124T93 511V545Q93 706 154 832T326 1030T566 1102Q777 1102 894 963T1011 565V488H278Q282 328 371 230T599 131Q697 131 765 171T884 277L997 189Q861 -20 589 -20ZM566 950Q454 950 378
+869T284 640H826V654Q818 795 750 872T566 950ZM656 1534H880L612 1240H463L656 1534Z" />
+<glyph unicode="&#xea;" horiz-adv-x="1085" d="M589 -20Q369 -20 231 124T93 511V545Q93 706 154 832T326 1030T566 1102Q777 1102 894 963T1011 565V488H278Q282 328 371 230T599 131Q697 131 765 171T884 277L997 189Q861 -20 589 -20ZM566 950Q454 950 378
+869T284 640H826V654Q818 795 750 872T566 950ZM868 1262V1252H715L565 1422L416 1252H263V1264L509 1536H621L868 1262Z" />
+<glyph unicode="&#xeb;" horiz-adv-x="1085" d="M589 -20Q369 -20 231 124T93 511V545Q93 706 154 832T326 1030T566 1102Q777 1102 894 963T1011 565V488H278Q282 328 371 230T599 131Q697 131 765 171T884 277L997 189Q861 -20 589 -20ZM566 950Q454 950 378
+869T284 640H826V654Q818 795 750 872T566 950ZM244 1371Q244 1416 271 1446T353 1477T435 1447T463 1371T435 1296T353 1266T272 1296T244 1371ZM674 1369Q674 1414 701 1445T783 1476T865 1445T893 1369T865 1294T783 1264T702 1294T674 1369Z" />
+<glyph unicode="&#xec;" horiz-adv-x="506" d="M341 0H155V1082H341V0ZM615 1495H456L198 1789H421L615 1495Z" />
+<glyph unicode="&#xed;" horiz-adv-x="506" d="M341 0H155V1082H341V0ZM343 1789H567L299 1495H150L343 1789Z" />
+<glyph unicode="&#xee;" horiz-adv-x="506" d="M341 0H155V1082H341V0ZM556 1261V1251H403L253 1421L104 1251H-49V1263L197 1535H309L556 1261Z" />
+<glyph unicode="&#xef;" horiz-adv-x="506" d="M341 0H155V1082H341V0ZM-68 1370Q-68 1415 -41 1445T41 1476T123 1446T151 1370T123 1295T41 1265T-40 1295T-68 1370ZM362 1368Q362 1413 389 1444T471 1475T553 1444T581 1368T553 1293T471 1263T390 1293T362 1368Z" />
+<glyph unicode="&#xf0;" horiz-adv-x="1200" d="M820 1301Q1069 1037 1069 628V535Q1069 377 1011 251T844 52T602 -20Q467 -20 357 44T187 221T126 467Q126 614 182 730T341 912T574 977Q737 977 858 863Q810 1058 669 1199L451 1051L378 1150L570 1281Q438 1372
+255 1421L312 1580Q551 1526 726 1387L915 1516L988 1416L820 1301ZM884 635L882 691Q849 752 780 788T618 825Q473 825 392 730T311 467Q311 327 394 229T606 131Q731 131 807 244T884 541V635Z" />
+<glyph unicode="&#xf1;" horiz-adv-x="1130" d="M315 1082L321 946Q445 1102 645 1102Q988 1102 991 715V0H806V716Q805 833 753 889T589 945Q499 945 431 897T325 771V0H140V1082H315ZM927 1504Q927 1396 866 1329T712 1262Q671 1262 641 1272T563 1313T493 1350T443
+1357Q402 1357 373 1326T344 1245L220 1252Q220 1359 280 1429T434 1499Q469 1499 497 1489T573 1450T646 1412T703 1403Q746 1403 774 1437T803 1516L927 1504Z" />
+<glyph unicode="&#xf2;" horiz-adv-x="1168" d="M91 551Q91 710 153 837T327 1033T582 1102Q803 1102 939 949T1076 542V529Q1076 371 1016 246T843 50T584 -20Q364 -20 228 133T91 538V551ZM277 529Q277 349 360 240T584 131Q725 131 808 241T891 551Q891 729
+807 839T582 950Q445 950 361 841T277 529ZM681 1240H522L264 1534H487L681 1240Z" />
+<glyph unicode="&#xf3;" horiz-adv-x="1168" d="M91 551Q91 710 153 837T327 1033T582 1102Q803 1102 939 949T1076 542V529Q1076 371 1016 246T843 50T584 -20Q364 -20 228 133T91 538V551ZM277 529Q277 349 360 240T584 131Q725 131 808 241T891 551Q891 729
+807 839T582 950Q445 950 361 841T277 529ZM666 1534H890L622 1240H473L666 1534Z" />
+<glyph unicode="&#xf4;" horiz-adv-x="1168" d="M91 551Q91 710 153 837T327 1033T582 1102Q803 1102 939 949T1076 542V529Q1076 371 1016 246T843 50T584 -20Q364 -20 228 133T91 538V551ZM277 529Q277 349 360 240T584 131Q725 131 808 241T891 551Q891 729
+807 839T582 950Q445 950 361 841T277 529ZM878 1262V1252H725L575 1422L426 1252H273V1264L519 1536H631L878 1262Z" />
+<glyph unicode="&#xf5;" horiz-adv-x="1168" d="M91 551Q91 710 153 837T327 1033T582 1102Q803 1102 939 949T1076 542V529Q1076 371 1016 246T843 50T584 -20Q364 -20 228 133T91 538V551ZM277 529Q277 349 360 240T584 131Q725 131 808 241T891 551Q891 729
+807 839T582 950Q445 950 361 841T277 529ZM930 1504Q930 1396 869 1329T715 1262Q674 1262 644 1272T566 1313T496 1350T446 1357Q405 1357 376 1326T347 1245L223 1252Q223 1359 283 1429T437 1499Q472 1499 500 1489T576 1450T649 1412T706 1403Q749 1403 777
+1437T806 1516L930 1504Z" />
+<glyph unicode="&#xf6;" horiz-adv-x="1168" d="M91 551Q91 710 153 837T327 1033T582 1102Q803 1102 939 949T1076 542V529Q1076 371 1016 246T843 50T584 -20Q364 -20 228 133T91 538V551ZM277 529Q277 349 360 240T584 131Q725 131 808 241T891 551Q891 729
+807 839T582 950Q445 950 361 841T277 529ZM254 1371Q254 1416 281 1446T363 1477T445 1447T473 1371T445 1296T363 1266T282 1296T254 1371ZM684 1369Q684 1414 711 1445T793 1476T875 1445T903 1369T875 1294T793 1264T712 1294T684 1369Z" />
+<glyph unicode="&#xf7;" horiz-adv-x="1169" d="M1069 600H71V784H1069V600ZM461 1098Q461 1146 489 1178T575 1210T661 1178T691 1098Q691 1051 662 1020T575 989T490 1020T461 1098ZM461 281Q461 329 489 361T575 393T661 361T691 281Q691 235 662 204T575 172T490
+203T461 281Z" />
+<glyph unicode="&#xf8;" horiz-adv-x="1160" d="M91 551Q91 710 152 836T326 1032T582 1102Q692 1102 786 1060L859 1208H983L881 1003Q1076 849 1076 529Q1076 371 1014 244T840 49T584 -20Q480 -20 394 15L320 -134H196L296 69Q91 218 91 551ZM276 529Q276 335
+373 224L716 918Q654 950 582 950Q444 950 360 841T276 529ZM890 551Q890 733 803 844L463 156Q518 131 584 131Q723 131 806 240T890 535V551Z" />
+<glyph unicode="&#xf9;" horiz-adv-x="1129" d="M808 107Q700 -20 491 -20Q318 -20 228 80T136 378V1082H321V383Q321 137 521 137Q733 137 803 295V1082H988V0H812L808 107ZM673 1240H514L256 1534H479L673 1240Z" />
+<glyph unicode="&#xfa;" horiz-adv-x="1129" d="M808 107Q700 -20 491 -20Q318 -20 228 80T136 378V1082H321V383Q321 137 521 137Q733 137 803 295V1082H988V0H812L808 107ZM658 1534H882L614 1240H465L658 1534Z" />
+<glyph unicode="&#xfb;" horiz-adv-x="1129" d="M808 107Q700 -20 491 -20Q318 -20 228 80T136 378V1082H321V383Q321 137 521 137Q733 137 803 295V1082H988V0H812L808 107ZM870 1262V1252H717L567 1422L418 1252H265V1264L511 1536H623L870 1262Z" />
+<glyph unicode="&#xfc;" horiz-adv-x="1129" d="M808 107Q700 -20 491 -20Q318 -20 228 80T136 378V1082H321V383Q321 137 521 137Q733 137 803 295V1082H988V0H812L808 107ZM246 1371Q246 1416 273 1446T355 1477T437 1447T465 1371T437 1296T355 1266T274 1296T246
+1371ZM676 1369Q676 1414 703 1445T785 1476T867 1445T895 1369T867 1294T785 1264T704 1294T676 1369Z" />
+<glyph unicode="&#xfd;" horiz-adv-x="969" d="M494 271L746 1082H944L509 -167Q408 -437 188 -437L153 -434L84 -421V-271L134 -275Q228 -275 280 -237T367 -98L408 12L22 1082H224L494 271ZM599 1534H823L555 1240H406L599 1534Z" />
+<glyph unicode="&#xfe;" horiz-adv-x="1180" d="M1063 529Q1063 282 950 131T644 -20Q447 -20 334 105V-416H149V1536H334V970Q447 1102 641 1102Q836 1102 949 955T1063 546V529ZM878 550Q878 733 800 839T586 945Q418 945 334 796V279Q417 131 588 131Q721 131
+799 236T878 550Z" />
+<glyph unicode="&#xff;" horiz-adv-x="969" d="M494 271L746 1082H944L509 -167Q408 -437 188 -437L153 -434L84 -421V-271L134 -275Q228 -275 280 -237T367 -98L408 12L22 1082H224L494 271ZM187 1371Q187 1416 214 1446T296 1477T378 1447T406 1371T378 1296T296
+1266T215 1296T187 1371ZM617 1369Q617 1414 644 1445T726 1476T808 1445T836 1369T808 1294T726 1264T645 1294T617 1369Z" />
+<glyph unicode="&#x2013;" horiz-adv-x="1344" d="M1421 651H419V802H1421V651Z" />
+<glyph unicode="&#x2014;" horiz-adv-x="1599" d="M1737 651H401V802H1737V651Z" />
+<glyph unicode="&#x2018;" horiz-adv-x="409" d="M270 1555L376 1483Q283 1356 280 1209V1073H96V1189Q96 1291 144 1391T270 1555Z" />
+<glyph unicode="&#x2019;" horiz-adv-x="409" d="M153 1046L48 1118Q141 1248 144 1392V1536H327V1406Q326 1306 278 1207T153 1046Z" />
+<glyph unicode="&#x201a;" horiz-adv-x="407" d="M141 -283L36 -210Q127 -83 130 63V181H315V81Q315 -20 266 -121T141 -283Z" />
+<glyph unicode="&#x201c;" horiz-adv-x="724" d="M278 1555L384 1483Q291 1356 288 1209V1073H104V1189Q104 1291 152 1391T278 1555ZM593 1555L699 1483Q606 1356 603 1209V1073H419V1189Q419 1291 467 1391T593 1555Z" />
+<glyph unicode="&#x201d;" horiz-adv-x="731" d="M165 1046L60 1118Q153 1248 156 1392V1536H339V1406Q338 1306 290 1207T165 1046ZM472 1046L367 1118Q460 1248 463 1392V1536H646V1406Q645 1306 597 1207T472 1046Z" />
+<glyph unicode="&#x201e;" horiz-adv-x="705" d="M141 -301L36 -229Q127 -92 130 61V246H315V82Q315 -26 266 -131T141 -301ZM437 -301L332 -229Q423 -92 426 61V246H612V82Q612 -25 564 -129T437 -301Z" />
+<glyph unicode="&#x2022;" horiz-adv-x="690" d="M138 772Q138 859 193 915T341 971Q432 971 489 917T546 769V732Q546 645 491 590T342 535Q249 535 194 590T138 734V772Z" />
+<glyph unicode="&#x2039;" horiz-adv-x="614" d="M286 550L544 153H403L108 541V560L403 949H544L286 550Z" />
+<glyph unicode="&#x203a;" horiz-adv-x="614" d="M231 949L526 560V541L231 152H89L347 550L89 949H231Z" />
+</font>
+</defs>
+</svg>

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-regular.ttf
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-regular.ttf b/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-regular.ttf
new file mode 100644
index 0000000..7b25f3c
Binary files /dev/null and b/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-regular.ttf differ

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-regular.woff
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-regular.woff b/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-regular.woff
new file mode 100644
index 0000000..941dfa4
Binary files /dev/null and b/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-regular.woff differ

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-regular.woff2
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-regular.woff2 b/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-regular.woff2
new file mode 100644
index 0000000..120796b
Binary files /dev/null and b/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-regular.woff2 differ

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/testem.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/testem.js b/contrib/views/hive20/src/main/resources/ui/testem.js
new file mode 100644
index 0000000..f186f06
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/testem.js
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*jshint node:true*/
+module.exports = {
+  "framework": "qunit",
+  "test_page": "tests/index.html?hidepassed",
+  "disable_watching": true,
+  "launch_in_ci": [
+    "PhantomJS"
+  ],
+  "launch_in_dev": [
+    "PhantomJS",
+    "Chrome"
+  ]
+};
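
For context only (this sketch is not part of the patch): Ember CLI reads the testem.js above when `ember test` runs. A hypothetical variation that picks the CI launcher from an environment variable could look like the following; the USE_CHROME_IN_CI variable is purely illustrative and does not exist anywhere in this commit.

/*jshint node:true*/
// Hypothetical variation of the config above (not in this patch):
// choose the CI launcher from an environment variable.
var ciLaunchers = process.env.USE_CHROME_IN_CI ? ["Chrome"] : ["PhantomJS"];

module.exports = {
  "framework": "qunit",
  "test_page": "tests/index.html?hidepassed",
  "disable_watching": true,
  "launch_in_ci": ciLaunchers,
  "launch_in_dev": ["PhantomJS", "Chrome"]
};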

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/tests/.jshintrc
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/tests/.jshintrc b/contrib/views/hive20/src/main/resources/ui/tests/.jshintrc
new file mode 100644
index 0000000..7ec445c
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/tests/.jshintrc
@@ -0,0 +1,53 @@
+{
+  "predef": [
+    "setBreakpoint",
+    "document",
+    "window",
+    "location",
+    "setTimeout",
+    "$",
+    "-Promise",
+    "define",
+    "console",
+    "visit",
+    "exists",
+    "fillIn",
+    "click",
+    "keyEvent",
+    "triggerEvent",
+    "find",
+    "findWithAssert",
+    "wait",
+    "DS",
+    "andThen",
+    "currentURL",
+    "currentPath",
+    "currentRouteName"
+  ],
+  "node": false,
+  "browser": false,
+  "boss": true,
+  "curly": true,
+  "debug": false,
+  "devel": false,
+  "eqeqeq": true,
+  "evil": true,
+  "forin": false,
+  "immed": false,
+  "laxbreak": false,
+  "newcap": true,
+  "noarg": true,
+  "noempty": false,
+  "nonew": false,
+  "nomen": false,
+  "onevar": false,
+  "plusplus": false,
+  "regexp": false,
+  "undef": true,
+  "sub": true,
+  "strict": false,
+  "white": false,
+  "eqnull": true,
+  "esversion": 6,
+  "unused": true
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/tests/helpers/destroy-app.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/tests/helpers/destroy-app.js b/contrib/views/hive20/src/main/resources/ui/tests/helpers/destroy-app.js
new file mode 100644
index 0000000..dfabf85
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/tests/helpers/destroy-app.js
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default function destroyApp(application) {
+  Ember.run(application, 'destroy');
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/tests/helpers/flash-message.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/tests/helpers/flash-message.js b/contrib/views/hive20/src/main/resources/ui/tests/helpers/flash-message.js
new file mode 100644
index 0000000..6c1fb29
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/tests/helpers/flash-message.js
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import FlashObject from 'ember-cli-flash/flash/object';
+
+const { K } = Ember;
+
+FlashObject.reopen({ init: K });

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/tests/helpers/module-for-acceptance.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/tests/helpers/module-for-acceptance.js b/contrib/views/hive20/src/main/resources/ui/tests/helpers/module-for-acceptance.js
new file mode 100644
index 0000000..cdee1a4
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/tests/helpers/module-for-acceptance.js
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { module } from 'qunit';
+import Ember from 'ember';
+import startApp from '../helpers/start-app';
+import destroyApp from '../helpers/destroy-app';
+
+const { RSVP: { Promise } } = Ember;
+
+export default function(name, options = {}) {
+  module(name, {
+    beforeEach() {
+      this.application = startApp();
+
+      if (options.beforeEach) {
+        return options.beforeEach.apply(this, arguments);
+      }
+    },
+
+    afterEach() {
+      let afterEach = options.afterEach && options.afterEach.apply(this, arguments);
+      return Promise.resolve(afterEach).then(() => destroyApp(this.application));
+    }
+  });
+}
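
For context only (not part of the patch): a typical acceptance test built on the moduleForAcceptance helper above would look roughly like the sketch below. The file location (tests/acceptance/), the '/queries' route, and the assertion are assumptions for illustration; visit, andThen and currentURL are the async test helpers whitelisted in tests/.jshintrc and registered by startApp().

// Hypothetical tests/acceptance/queries-test.js (route name is an assumption).
import { test } from 'qunit';
import moduleForAcceptance from '../helpers/module-for-acceptance';

moduleForAcceptance('Acceptance | queries');

test('visiting /queries', function(assert) {
  visit('/queries');

  andThen(function() {
    assert.equal(currentURL(), '/queries');
  });
});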

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/tests/helpers/resolver.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/tests/helpers/resolver.js b/contrib/views/hive20/src/main/resources/ui/tests/helpers/resolver.js
new file mode 100644
index 0000000..399978a
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/tests/helpers/resolver.js
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Resolver from '../../resolver';
+import config from '../../config/environment';
+
+const resolver = Resolver.create();
+
+resolver.namespace = {
+  modulePrefix: config.modulePrefix,
+  podModulePrefix: config.podModulePrefix
+};
+
+export default resolver;

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/tests/helpers/responsive.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/tests/helpers/responsive.js b/contrib/views/hive20/src/main/resources/ui/tests/helpers/responsive.js
new file mode 100644
index 0000000..ba4a806
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/tests/helpers/responsive.js
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import MediaService from 'ember-responsive/media';
+
+const { K, getOwner } = Ember;
+const { classify } = Ember.String;
+
+MediaService.reopen({
+  // Change this if you want a different default breakpoint in tests.
+  _defaultBreakpoint: 'desktop',
+
+  _breakpointArr: Ember.computed('breakpoints', function() {
+    return Object.keys(this.get('breakpoints')) || Ember.A([]);
+  }),
+
+  _forceSetBreakpoint(breakpoint) {
+    let found = false;
+
+    const props = {};
+    this.get('_breakpointArr').forEach(function(bp) {
+      const val = bp === breakpoint;
+      if (val) {
+        found = true;
+      }
+
+      props[`is${classify(bp)}`] = val;
+    });
+
+    if (found) {
+      this.setProperties(props);
+    } else {
+      throw new Error(
+        `You tried to set the breakpoint to ${breakpoint}, which is not in your app/breakpoints.js file.`
+      );
+    }
+  },
+
+  match: K, // do not set up listeners in test
+
+  init() {
+    this._super(...arguments);
+
+    this._forceSetBreakpoint(this.get('_defaultBreakpoint'));
+  }
+});
+
+export default Ember.Test.registerAsyncHelper('setBreakpoint', function(app, breakpoint) {
+  // this should use getOwner once that's supported
+  const mediaService = app.__deprecatedInstance__.lookup('service:media');
+  mediaService._forceSetBreakpoint(breakpoint);
+});
+
+export function setBreakpointForIntegrationTest(container, breakpoint) {
+  const mediaService = getOwner(container).lookup('service:media');
+  mediaService._forceSetBreakpoint(breakpoint);
+  container.set('media', mediaService);
+
+  return mediaService;
+}
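
For context only (not part of the patch): the responsive helper above can be exercised in an integration test roughly as sketched below. The component name, the 'mobile' breakpoint, and the 'ui/tests/helpers/responsive' module path are assumptions; the breakpoint name must exist in the app's breakpoints file or _forceSetBreakpoint will throw.

// Hypothetical integration test forcing the 'mobile' breakpoint.
import { moduleForComponent, test } from 'ember-qunit';
import hbs from 'htmlbars-inline-precompile';
import { setBreakpointForIntegrationTest } from 'ui/tests/helpers/responsive';

moduleForComponent('my-component', 'Integration | Component | my component', {
  integration: true,
  beforeEach() {
    // Forces the media service into the 'mobile' state for this test context.
    setBreakpointForIntegrationTest(this, 'mobile');
  }
});

test('renders with the mobile flag set', function(assert) {
  this.render(hbs`{{my-component}}`);
  assert.ok(this.get('media.isMobile'));
});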

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/tests/helpers/start-app.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/tests/helpers/start-app.js b/contrib/views/hive20/src/main/resources/ui/tests/helpers/start-app.js
new file mode 100644
index 0000000..7b25773
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/tests/helpers/start-app.js
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import Application from '../../app';
+import config from '../../config/environment';
+
+export default function startApp(attrs) {
+  let application;
+
+  let attributes = Ember.merge({}, config.APP);
+  attributes = Ember.merge(attributes, attrs); // use defaults, but you can override;
+
+  Ember.run(() => {
+    application = Application.create(attributes);
+    application.setupForTesting();
+    application.injectTestHelpers();
+  });
+
+  return application;
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/tests/index.html
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/tests/index.html b/contrib/views/hive20/src/main/resources/ui/tests/index.html
new file mode 100644
index 0000000..9987e2a
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/tests/index.html
@@ -0,0 +1,51 @@
+<!--
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+-->
+
+<!DOCTYPE html>
+<html>
+  <head>
+    <meta charset="utf-8">
+    <meta http-equiv="X-UA-Compatible" content="IE=edge">
+    <title>Ui Tests</title>
+    <meta name="description" content="">
+    <meta name="viewport" content="width=device-width, initial-scale=1">
+
+    {{content-for "head"}}
+    {{content-for "test-head"}}
+
+    <link rel="stylesheet" href="{{rootURL}}assets/vendor.css">
+    <link rel="stylesheet" href="{{rootURL}}assets/ui.css">
+    <link rel="stylesheet" href="{{rootURL}}assets/test-support.css">
+
+    {{content-for "head-footer"}}
+    {{content-for "test-head-footer"}}
+  </head>
+  <body>
+    {{content-for "body"}}
+    {{content-for "test-body"}}
+
+    <script src="{{rootURL}}testem.js" integrity=""></script>
+    <script src="{{rootURL}}assets/vendor.js"></script>
+    <script src="{{rootURL}}assets/test-support.js"></script>
+    <script src="{{rootURL}}assets/ui.js"></script>
+    <script src="{{rootURL}}assets/tests.js"></script>
+
+    {{content-for "body-footer"}}
+    {{content-for "test-body-footer"}}
+  </body>
+</html>

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/tests/integration/.gitkeep
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/tests/integration/.gitkeep b/contrib/views/hive20/src/main/resources/ui/tests/integration/.gitkeep
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/tests/test-helper.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/tests/test-helper.js b/contrib/views/hive20/src/main/resources/ui/tests/test-helper.js
new file mode 100644
index 0000000..26c9751
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/tests/test-helper.js
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import resolver from './helpers/resolver';
+import './helpers/flash-message';
+
+import './helpers/responsive';
+
+import {
+  setResolver
+} from 'ember-qunit';
+
+setResolver(resolver);

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/tests/unit/.gitkeep
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/tests/unit/.gitkeep b/contrib/views/hive20/src/main/resources/ui/tests/unit/.gitkeep
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/vendor/.gitkeep
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/vendor/.gitkeep b/contrib/views/hive20/src/main/resources/ui/vendor/.gitkeep
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/view.log4j.properties
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/view.log4j.properties b/contrib/views/hive20/src/main/resources/view.log4j.properties
new file mode 100644
index 0000000..cbab11b
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/view.log4j.properties
@@ -0,0 +1,27 @@
+# Copyright 2011 The Apache Software Foundation
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+log4j.appender.hive20=org.apache.log4j.RollingFileAppender
+log4j.appender.hive20.File=${ambari.log.dir}/hive20-view/hive20-view.log
+log4j.appender.hive20.MaxFileSize=80MB
+log4j.appender.hive20.MaxBackupIndex=60
+log4j.appender.hive20.layout=org.apache.log4j.PatternLayout
+log4j.appender.hive20.layout.ConversionPattern=%d{DATE} %5p [%t] [%X{viewName} %X{viewVersion} %X{viewInstanceName}] %c{1}:%L - %m%n
+
+log4j.logger.org.apache.ambari.view.hive20=INFO,hive20
+log4j.additivity.org.apache.ambari.view.hive20=false
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/view.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/view.xml b/contrib/views/hive20/src/main/resources/view.xml
new file mode 100644
index 0000000..315d0a9
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/view.xml
@@ -0,0 +1,362 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<view>
+    <name>HIVE</name>
+    <label>Hive</label>
+    <version>2.0.0</version>
+    <build>${env.BUILD_NUMBER}</build>
+
+    <min-ambari-version>2.0.*</min-ambari-version>
+
+    <data-version>1</data-version>
+    <data-migrator-class>org.apache.ambari.view.hive20.DataMigrator</data-migrator-class>
+
+    <validator-class>org.apache.ambari.view.hive20.PropertyValidator</validator-class>
+    <view-class>org.apache.ambari.view.hive20.HiveViewImpl</view-class>
+
+    <!-- Hive Configs -->
+    <parameter>
+      <name>hive.jdbc.url</name>
+      <description>Enter the JDBC URL used to connect to HiveServer2</description>
+      <label>HiveServer2 JDBC URL</label>
+      <placeholder>jdbc:hive2://127.0.0.1:10000</placeholder>
+      <cluster-config>fake</cluster-config>
+      <required>true</required>
+    </parameter>
+
+    <parameter>
+      <name>hive.session.params</name>
+      <description>Semicolon-separated key-value parameters used in JDBC URL generation when connecting to HiveServer2</description>
+      <label>Hive Session Parameters</label>
+      <placeholder>transportMode=http;httpPath=cliservice</placeholder>
+      <default-value></default-value>
+      <required>false</required>
+    </parameter>
+
+    <parameter>
+        <name>hive.ldap.configured</name>
+        <description>Set to true if Hive server is configured through LDAP</description>
+        <label>Hive LDAP configuration</label>
+        <placeholder>false</placeholder>
+        <default-value>false</default-value>
+        <required>false</required>
+    </parameter>
+
+    <parameter>
+        <name>hive.metastore.warehouse.dir</name>
+        <description>Hive Metastore directory (example: /apps/hive/warehouse)</description>
+        <label>Hive Metastore directory</label>
+        <placeholder>/apps/hive/warehouse</placeholder>
+        <default-value>/apps/hive/warehouse</default-value>
+        <cluster-config>hive-site/hive.metastore.warehouse.dir</cluster-config>
+        <required>false</required>
+    </parameter>
+
+    <!-- HDFS Configs -->
+    <parameter>
+        <name>webhdfs.url</name>
+        <description>Enter the WebHDFS FileSystem URI. Typically this is the dfs.namenode.http-address
+            property in the hdfs-site.xml configuration. The URL must be accessible from the Ambari Server.</description>
+        <label>WebHDFS FileSystem URI</label>
+        <placeholder>webhdfs://namenode:50070</placeholder>
+        <required>true</required>
+        <cluster-config>core-site/fs.defaultFS</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.nameservices</name>
+        <description>Comma-separated list of nameservices. Value of hdfs-site/dfs.nameservices property</description>
+        <label>Logical name of the NameNode cluster</label>
+        <required>false</required>
+        <cluster-config>hdfs-site/dfs.nameservices</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.ha.namenodes.list</name>
+        <description>Comma-separated list of namenodes for a given nameservice.
+          Value of hdfs-site/dfs.ha.namenodes.[nameservice] property</description>
+        <label>List of NameNodes</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.ha.namenode.rpc-address.nn1</name>
+        <description>RPC address for the first NameNode.
+          Value of hdfs-site/dfs.namenode.rpc-address.[nameservice].[namenode1] property</description>
+        <label>First NameNode RPC Address</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.ha.namenode.rpc-address.nn2</name>
+        <description>RPC address for the second NameNode.
+          Value of hdfs-site/dfs.namenode.rpc-address.[nameservice].[namenode2] property</description>
+        <label>Second NameNode RPC Address</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.ha.namenode.http-address.nn1</name>
+        <description>WebHDFS address for the first NameNode.
+          Value of hdfs-site/dfs.namenode.http-address.[nameservice].[namenode1] property</description>
+        <label>First NameNode HTTP (WebHDFS) Address</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.ha.namenode.http-address.nn2</name>
+        <description>WebHDFS address for the second NameNode.
+          Value of hdfs-site/dfs.namenode.http-address.[nameservice].[namenode2] property</description>
+        <label>Second NameNode HTTP (WebHDFS) Address</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.ha.namenode.https-address.nn1</name>
+        <description>WebHDFS HTTPS address for the first NameNode.
+            Value of hdfs-site/dfs.namenode.https-address.[nameservice].[namenode1] property</description>
+        <label>First NameNode HTTPS (WebHDFS) Address</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.ha.namenode.https-address.nn2</name>
+        <description>WebHDFS HTTPS address for the second NameNode.
+            Value of hdfs-site/dfs.namenode.https-address.[nameservice].[namenode2] property</description>
+        <label>Second NameNode HTTPS (WebHDFS) Address</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.client.failover.proxy.provider</name>
+        <description>The Java class that HDFS clients use to contact the Active NameNode.
+          Value of hdfs-site/dfs.client.failover.proxy.provider.[nameservice] property</description>
+        <label>Failover Proxy Provider</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+
+    <parameter>
+        <name>webhdfs.username</name>
+        <description>doAs for proxy user for HDFS. By default, uses the currently logged-in Ambari user.</description>
+        <label>WebHDFS Username</label>
+        <default-value>${username}</default-value>
+        <required>false</required>
+    </parameter>
+
+    <parameter>
+        <name>webhdfs.auth</name>
+        <description>Semicolon-separated authentication configs.</description>
+        <label>WebHDFS Authentication</label>
+        <placeholder>auth=SIMPLE</placeholder>
+        <required>false</required>
+    </parameter>
+
+    <parameter>
+        <name>hdfs.umask-mode</name>
+        <description>The umask used when creating files and directories. Defaults to 022</description>
+        <label>Umask</label>
+        <default-value>022</default-value>
+        <required>false</required>
+        <cluster-config>hdfs-site/fs.permissions.umask-mode</cluster-config>
+    </parameter>
+
+    <parameter>
+        <name>hdfs.auth_to_local</name>
+        <description>Auth to Local Configuration</description>
+        <label>Auth To Local</label>
+        <required>false</required>
+        <cluster-config>core-site/hadoop.security.auth_to_local</cluster-config>
+    </parameter>
+
+    <!-- General Configs -->
+
+    <parameter>
+        <name>views.tez.instance</name>
+        <description>Instance name of Tez view.</description>
+        <label>Instance name of Tez view</label>
+        <required>false</required>
+    </parameter>
+
+    <parameter>
+        <name>scripts.dir</name>
+        <description>HDFS directory path to store Hive scripts.</description>
+        <label>Scripts HDFS Directory</label>
+        <placeholder>/user/${username}/hive/scripts</placeholder>
+        <default-value>/user/${username}/hive/scripts</default-value>
+        <required>true</required>
+    </parameter>
+
+    <parameter>
+        <name>jobs.dir</name>
+        <description>HDFS directory path to store Hive job status.</description>
+        <label>Jobs HDFS Directory</label>
+        <placeholder>/user/${username}/hive/jobs</placeholder>
+        <default-value>/user/${username}/hive/jobs</default-value>
+        <required>true</required>
+    </parameter>
+
+    <parameter>
+        <name>scripts.settings.defaults-file</name>
+        <description>File path for saving default settings for query</description>
+        <label>Default script settings file</label>
+        <default-value>/user/${username}/.${instanceName}.defaultSettings</default-value>
+        <required>true</required>
+    </parameter>
+
+
+    <parameter>
+        <name>use.hive.interactive.mode</name>
+        <description>Connects to the Hive interactive server if set to true</description>
+        <label>Use Interactive Mode (Tech Preview)</label>
+        <default-value>false</default-value>
+        <required>true</required>
+    </parameter>
+
+    <parameter>
+        <name>yarn.ats.url</name>
+        <description>The URL to the YARN Application Timeline Server, used to provide Jobs information. Typically this is the yarn.timeline-service.webapp.address property in the yarn-site.xml configuration.</description>
+        <label>YARN Application Timeline Server URL</label>
+        <placeholder>http://yarn.ats.address:8188</placeholder>
+        <cluster-config>yarn-site/yarn.timeline-service.webapp.address</cluster-config>
+        <required>true</required>
+    </parameter>
+
+    <parameter>
+        <name>yarn.resourcemanager.url</name>
+        <description>The URL to the YARN ResourceManager, used to provide YARN Application data. If YARN ResourceManager HA is enabled, provide a comma-separated list of URLs for all the ResourceManagers.</description>
+        <label>YARN ResourceManager URL</label>
+        <placeholder>http://yarn.resourcemanager.address:8088</placeholder>
+        <cluster-config>yarn-site/yarn.resourcemanager.webapp.address</cluster-config>
+        <required>true</required>
+    </parameter>
+
+    <resource>
+        <name>savedQuery</name>
+        <plural-name>savedQueries</plural-name>
+        <id-property>id</id-property>
+        <resource-class>org.apache.ambari.view.hive20.resources.savedQueries.SavedQuery</resource-class>
+        <provider-class>org.apache.ambari.view.hive20.resources.savedQueries.SavedQueryResourceProvider</provider-class>
+        <service-class>org.apache.ambari.view.hive20.resources.savedQueries.SavedQueryService</service-class>
+    </resource>
+
+    <resource>
+        <name>fileResource</name>
+        <plural-name>fileResources</plural-name>
+        <id-property>id</id-property>
+        <resource-class>org.apache.ambari.view.hive20.resources.resources.FileResourceItem</resource-class>
+        <provider-class>org.apache.ambari.view.hive20.resources.resources.FileResourceResourceProvider</provider-class>
+        <service-class>org.apache.ambari.view.hive20.resources.resources.FileResourceService</service-class>
+    </resource>
+
+    <resource>
+        <name>udf</name>
+        <plural-name>udfs</plural-name>
+        <id-property>id</id-property>
+        <resource-class>org.apache.ambari.view.hive20.resources.udfs.UDF</resource-class>
+        <provider-class>org.apache.ambari.view.hive20.resources.udfs.UDFResourceProvider</provider-class>
+        <service-class>org.apache.ambari.view.hive20.resources.udfs.UDFService</service-class>
+    </resource>
+
+    <resource>
+        <name>jobs</name>
+        <service-class>org.apache.ambari.view.hive20.resources.jobs.JobService</service-class>
+    </resource>
+
+    <resource>
+        <name>upload</name>
+        <plural-name>uploads</plural-name>
+        <service-class>org.apache.ambari.view.hive20.resources.uploads.UploadService</service-class>
+    </resource>
+
+    <resource>
+        <name>files</name>
+        <service-class>org.apache.ambari.view.hive20.resources.files.FileService</service-class>
+    </resource>
+
+    <!--<resource>
+        <name>ddl</name>
+        <service-class>org.apache.ambari.view.hive2.resources.browser.HiveBrowserService</service-class>
+    </resource>-->
+
+    <resource>
+        <name>hive</name>
+        <service-class>org.apache.ambari.view.hive20.HelpService</service-class>
+    </resource>
+
+
+    <resource>
+        <name>connection</name>
+        <service-class>org.apache.ambari.view.hive20.resources.browser.ConnectionService</service-class>
+    </resource>
+
+    <resource>
+        <name>system</name>
+        <service-class>org.apache.ambari.view.hive20.resources.system.SystemService</service-class>
+    </resource>
+
+    <resource>
+        <name>settings</name>
+        <resource-class>org.apache.ambari.view.hive20.resources.settings.Setting</resource-class>
+        <service-class>org.apache.ambari.view.hive20.resources.settings.SettingsService</service-class>
+    </resource>
+
+    <resource>
+        <name>ddl</name>
+        <service-class>org.apache.ambari.view.hive20.resources.browser.DDLService</service-class>
+    </resource>
+
+    <resource>
+        <name>directories</name>
+        <service-class>org.apache.ambari.view.hive20.resources.browser.FileService</service-class>
+    </resource>
+
+    <persistence>
+        <entity>
+            <class>org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobImpl</class>
+            <id-property>id</id-property>
+        </entity>
+        <entity>
+            <class>org.apache.ambari.view.hive20.resources.savedQueries.SavedQuery</class>
+            <id-property>id</id-property>
+        </entity>
+        <entity>
+            <class>org.apache.ambari.view.hive20.resources.udfs.UDF</class>
+            <id-property>id</id-property>
+        </entity>
+        <entity>
+            <class>org.apache.ambari.view.hive20.resources.resources.FileResourceItem</class>
+            <id-property>id</id-property>
+        </entity>
+        <entity>
+            <class>org.apache.ambari.view.hive20.resources.settings.Setting</class>
+            <id-property>id</id-property>
+        </entity>
+        <entity>
+            <class>org.apache.ambari.view.hive20.TestBean</class>
+            <id-property>id</id-property>
+        </entity>
+    </persistence>
+
+    <auto-instance>
+        <name>AUTO_HIVE20_INSTANCE</name>
+        <label>Hive View 2.0</label>
+        <description>This view instance is auto-created when the Hive service is added to a cluster.</description>
+        <stack-id>HDP-2.*</stack-id>
+        <services>
+            <service>HIVE</service>
+        </services>
+    </auto-instance>
+</view>


[10/20] ambari git commit: AMBARI-19321 : Hive View 2.0 - Minimal view for Hive which includes new UI changes. Also made changes in poms as required (nitirajrathore)

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/UploadService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/UploadService.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/UploadService.java
new file mode 100644
index 0000000..835626d
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/UploadService.java
@@ -0,0 +1,565 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads;
+
+import com.sun.jersey.core.header.FormDataContentDisposition;
+import com.sun.jersey.multipart.FormDataParam;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive.resources.uploads.CSVParams;
+import org.apache.ambari.view.hive20.BaseService;
+import org.apache.ambari.view.hive20.ConnectionFactory;
+import org.apache.ambari.view.hive20.ConnectionSystem;
+import org.apache.ambari.view.hive20.client.DDLDelegator;
+import org.apache.ambari.view.hive20.client.DDLDelegatorImpl;
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobController;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobImpl;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobResourceManager;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.DataParser;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.PreviewData;
+import org.apache.ambari.view.hive20.resources.uploads.query.DeleteQueryInput;
+import org.apache.ambari.view.hive20.resources.uploads.query.InsertFromQueryInput;
+import org.apache.ambari.view.hive20.resources.uploads.query.QueryGenerator;
+import org.apache.ambari.view.hive20.resources.uploads.query.TableInfo;
+import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
+import org.apache.ambari.view.hive20.utils.SharedObjectsFactory;
+import org.apache.ambari.view.utils.ambari.AmbariApi;
+import org.apache.commons.io.input.ReaderInputStream;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.type.TypeReference;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * UI-driven end points for creating a new Hive table and inserting data into it.
+ * The workflow: upload a file, partially parse it based on its type, generate a preview,
+ * create a temporary Hive table (stored as CSV) and the actual Hive table,
+ * upload the file again, parse it, convert it into a CSV stream and upload it to HDFS for the temporary table,
+ * insert rows from the temporary table into the actual table, and drop the temporary table.
+ * <p/>
+ * API:
+ * POST /preview : takes a stream, parses it and returns preview rows, headers and column type suggestions
+ * POST /createTable : runs a Hive query to create the table in Hive
+ * POST /upload : takes a stream, parses it, converts it into CSV and uploads it to the temporary table
+ * POST /insertIntoTable : runs a Hive query to insert data from the temporary table into the actual Hive table
+ * POST /deleteTable : drops the temporary table
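+ * <p/>
+ * Illustrative example (not part of the original code): a JSON body for POST /previewFromHdfs,
+ * with property names inferred from the UploadFromHdfsInput getters; the exact names depend on
+ * how that bean is serialized:
+ * <pre>
+ * { "hdfsPath": "/user/admin/data/sample.csv", "inputFileType": "CSV",
+ *   "isFirstRowHeader": true, "csvDelimiter": ",", "csvQuote": "\"", "csvEscape": "\\" }
+ * </pre>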
+ */
+public class UploadService extends BaseService {
+
+  private final static Logger LOG =
+    LoggerFactory.getLogger(UploadService.class);
+
+  @Inject
+  protected ViewContext context;
+
+  private AmbariApi ambariApi;
+  protected JobResourceManager resourceManager;
+
+  final private static String HIVE_METASTORE_LOCATION_KEY = "hive.metastore.warehouse.dir";
+  final private static String HIVE_SITE = "hive-site";
+  final private static String HIVE_METASTORE_LOCATION_KEY_VIEW_PROPERTY = HIVE_METASTORE_LOCATION_KEY;
+  private static final String HIVE_DEFAULT_METASTORE_LOCATION = "/apps/hive/warehouse";
+  final private static String HIVE_DEFAULT_DB = "default";
+
+  public void validateForUploadFile(UploadFromHdfsInput input){
+    if( null == input.getInputFileType()){
+      throw new IllegalArgumentException("inputFileType parameter cannot be null.");
+    }
+    if( null == input.getHdfsPath()){
+      throw new IllegalArgumentException("hdfsPath parameter cannot be null.");
+    }
+    if( null == input.getTableName()){
+      throw new IllegalArgumentException("tableName parameter cannot be null.");
+    }
+    if( null == input.getDatabaseName()){
+      throw new IllegalArgumentException("databaseName parameter cannot be null.");
+    }
+
+    if( input.getIsFirstRowHeader() == null ){
+      input.setIsFirstRowHeader(false);
+    }
+  }
+
+  public void validateForPreview(UploadFromHdfsInput input){
+    if( input.getIsFirstRowHeader() == null ){
+      input.setIsFirstRowHeader(false);
+    }
+
+    if( null == input.getInputFileType()){
+      throw new IllegalArgumentException("inputFileType parameter cannot be null.");
+    }
+    if( null == input.getHdfsPath()){
+      throw new IllegalArgumentException("hdfsPath parameter cannot be null.");
+    }
+  }
+
+  @POST
+  @Path("/previewFromHdfs")
+  @Consumes(MediaType.APPLICATION_JSON)
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response uploadForPreviewFromHDFS(UploadFromHdfsInput input) {
+    InputStream uploadedInputStream = null;
+    try {
+      uploadedInputStream = getHDFSFileStream(input.getHdfsPath());
+      this.validateForPreview(input);
+      CSVParams csvParams = getCsvParams(input.getCsvDelimiter(), input.getCsvQuote(), input.getCsvEscape());
+      PreviewData pd = generatePreview(input.getIsFirstRowHeader(), input.getInputFileType(), csvParams, uploadedInputStream);
+      String tableName = getBasenameFromPath(input.getHdfsPath());
+      return createPreviewResponse(pd, input.getIsFirstRowHeader(), tableName);
+    } catch (WebApplicationException e) {
+      LOG.error(getErrorMessage(e), e);
+      throw e;
+    } catch (Exception e) {
+      LOG.error(e.getMessage(), e);
+      throw new ServiceFormattedException(e);
+    } finally {
+      if (null != uploadedInputStream) {
+        try {
+          uploadedInputStream.close();
+        } catch (IOException e) {
+          LOG.error("Exception occured while closing the HDFS file stream for path " + input.getHdfsPath(), e);
+        }
+      }
+    }
+  }
+
+  @POST
+  @Path("/preview")
+  @Consumes(MediaType.MULTIPART_FORM_DATA)
+  public Response uploadForPreview(
+    @FormDataParam("file") InputStream uploadedInputStream,
+    @FormDataParam("file") FormDataContentDisposition fileDetail,
+    @FormDataParam("isFirstRowHeader") Boolean isFirstRowHeader,
+    @FormDataParam("inputFileType") String inputFileType,
+    @FormDataParam("csvDelimiter") String csvDelimiter,
+    @FormDataParam("csvEscape") String csvEscape,
+    @FormDataParam("csvQuote") String csvQuote
+  ) {
+    try {
+      if( null == inputFileType)
+        throw new IllegalArgumentException("inputFileType parameter cannot be null.");
+
+      if( null == isFirstRowHeader )
+        isFirstRowHeader = false;
+
+      CSVParams csvParams = getCsvParams(csvDelimiter, csvQuote, csvEscape);
+
+      PreviewData pd = generatePreview(isFirstRowHeader, inputFileType, csvParams, uploadedInputStream);
+      return createPreviewResponse(pd, isFirstRowHeader, getBasename(fileDetail.getFileName()));
+    } catch (WebApplicationException e) {
+      LOG.error(getErrorMessage(e), e);
+      throw e;
+    } catch (Exception e) {
+      LOG.error(e.getMessage(), e);
+      throw new ServiceFormattedException(e);
+    }
+  }
+
+  private CSVParams getCsvParams(String csvDelimiter, String csvQuote, String csvEscape) {
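+    // Use the first character of each non-null, non-empty parameter; otherwise keep the CSVParams defaults.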
+    char csvq =  CSVParams.DEFAULT_QUOTE_CHAR;
+    char csvd =  CSVParams.DEFAULT_DELIMITER_CHAR;
+    char csve =  CSVParams.DEFAULT_ESCAPE_CHAR;
+
+    if(null != csvDelimiter){
+      char[] csvdArray = csvDelimiter.toCharArray();
+      if(csvdArray.length > 0 ) {
+        csvd = csvdArray[0];
+      }
+    }
+
+    if(null != csvQuote){
+      char[] csvqArray = csvQuote.toCharArray();
+      if(csvqArray.length > 0 ) {
+        csvq = csvqArray[0];
+      }
+    }
+
+    if(null != csvEscape){
+      char[] csveArray = csvEscape.toCharArray();
+      if(csveArray.length > 0 ) {
+        csve = csveArray[0];
+      }
+    }
+
+    return new CSVParams(csvd, csvq, csve);
+  }
+
+
+  @Path("/createTable")
+  @POST
+  @Consumes(MediaType.APPLICATION_JSON)
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response createTable(TableInput tableInput) {
+    try {
+      tableInput.validate();
+      String databaseName = tableInput.getDatabaseName();
+      String tableCreationQuery = generateCreateQuery(tableInput);
+      LOG.info("tableCreationQuery : {}", tableCreationQuery);
+
+      Job job = createJob(tableCreationQuery, databaseName);
+      LOG.info("job created for table creation {}", job);
+      return Response.ok(job).build();
+    } catch (WebApplicationException e) {
+      LOG.error(getErrorMessage(e), e);
+      throw e;
+    } catch (Throwable e) {
+      LOG.error(e.getMessage(), e);
+      throw new ServiceFormattedException(e);
+    }
+  }
+
+  @Path("/uploadFromHDFS")
+  @POST
+  @Consumes(MediaType.APPLICATION_JSON)
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response uploadFileFromHdfs(UploadFromHdfsInput input) {
+    // create stream and upload
+    InputStream hdfsStream = null;
+    try {
+      hdfsStream = getHDFSFileStream(input.getHdfsPath());
+      CSVParams csvParams = getCsvParams(input.getCsvDelimiter(), input.getCsvQuote(), input.getCsvEscape());
+      String path = uploadFileFromStream(hdfsStream, input.getIsFirstRowHeader(), input.getInputFileType(), input.getTableName(), input.getDatabaseName(), input.getHeader(), input.isContainsEndlines(), csvParams);
+
+      JSONObject jo = new JSONObject();
+      jo.put("uploadedPath", path);
+
+      return Response.ok(jo).build();
+    } catch (WebApplicationException e) {
+      LOG.error(getErrorMessage(e), e);
+      throw e;
+    } catch (Exception e) {
+      LOG.error(e.getMessage(), e);
+      throw new ServiceFormattedException(e);
+    } finally {
+      if (null != hdfsStream)
+        try {
+          hdfsStream.close();
+        } catch (IOException e) {
+          LOG.error("Exception occured while closing the HDFS stream for path : " + input.getHdfsPath(), e);
+        }
+    }
+  }
+
+  @Path("/upload")
+  @POST
+  @Consumes(MediaType.MULTIPART_FORM_DATA)
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response uploadFile(
+    @FormDataParam("file") InputStream uploadedInputStream,
+    @FormDataParam("file") FormDataContentDisposition fileDetail,
+    @FormDataParam("isFirstRowHeader") Boolean isFirstRowHeader,
+    @FormDataParam("inputFileType") String inputFileType,   // the format of the file uploaded. CSV/JSON etc.
+    @FormDataParam("tableName") String tableName,
+    @FormDataParam("databaseName") String databaseName,
+    @FormDataParam("header") String header,
+    @FormDataParam("containsEndlines") boolean containsEndlines,
+    @FormDataParam("csvDelimiter") String csvDelimiter,
+    @FormDataParam("csvEscape") String csvEscape,
+    @FormDataParam("csvQuote") String csvQuote
+
+  ) {
+    try {
+      CSVParams csvParams = getCsvParams(csvDelimiter, csvQuote, csvEscape);
+      ObjectMapper mapper = new ObjectMapper();
+      List<ColumnDescriptionImpl> columnList = mapper.readValue(header, new TypeReference<List<ColumnDescriptionImpl>>(){});
+      String path = uploadFileFromStream(uploadedInputStream, isFirstRowHeader, inputFileType, tableName, databaseName, columnList, containsEndlines, csvParams);
+
+      JSONObject jo = new JSONObject();
+      jo.put("uploadedPath", path);
+      return Response.ok(jo).build();
+    } catch (WebApplicationException e) {
+      LOG.error(getErrorMessage(e), e);
+      throw e;
+    } catch (Exception e) {
+      LOG.error(e.getMessage(), e);
+      throw new ServiceFormattedException(e);
+    }
+  }
+
+  @Path("/insertIntoTable")
+  @POST
+  @Consumes(MediaType.APPLICATION_JSON)
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response insertFromTempTable(InsertFromQueryInput input) {
+    try {
+      String insertQuery = generateInsertFromQuery(input);
+      LOG.info("insertQuery : {}", insertQuery);
+
+      Job job = createJob(insertQuery, "default");
+      LOG.info("Job created for insert from temp table : {}", job);
+      return Response.ok(job).build();
+    } catch (WebApplicationException e) {
+      LOG.error(getErrorMessage(e), e);
+      throw e;
+    } catch (Throwable e) {
+      LOG.error(e.getMessage(), e);
+      throw new ServiceFormattedException(e);
+    }
+  }
+
+  @Path("/deleteTable")
+  @POST
+  @Consumes(MediaType.APPLICATION_JSON)
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response deleteTable(DeleteQueryInput input) {
+    try {
+      String deleteQuery = generateDeleteQuery(input);
+      LOG.info("deleteQuery : {}", deleteQuery);
+
+      Job job = createJob(deleteQuery, "default");
+      LOG.info("Job created for delete temp table : {} ", job);
+      return Response.ok(job).build();
+    } catch (WebApplicationException e) {
+      LOG.error(getErrorMessage(e), e);
+      throw e;
+    } catch (Throwable e) {
+      LOG.error(e.getMessage(), e);
+      throw new ServiceFormattedException(e);
+    }
+  }
+
+  private String uploadIntoTable(Reader reader, String databaseName, String tempTableName) {
+    try {
+      String fullPath = getHiveMetaStoreLocation(databaseName, tempTableName);
+      LOG.info("Uploading file into : {}", fullPath);
+      uploadFile(fullPath, new ReaderInputStream(reader));
+      return fullPath;
+    } catch (WebApplicationException e) {
+      LOG.error(getErrorMessage(e), e);
+      throw e;
+    } catch (Exception e) {
+      LOG.error(e.getMessage(), e);
+      throw new ServiceFormattedException(e);
+    }
+  }
+
+  private synchronized JobResourceManager getResourceManager() {
+    if (resourceManager == null) {
+      SharedObjectsFactory connectionsFactory = getSharedObjectsFactory();
+      resourceManager = new JobResourceManager(connectionsFactory, context);
+    }
+    return resourceManager;
+  }
+
+  private synchronized AmbariApi getAmbariApi() {
+    if (null == ambariApi) {
+      ambariApi = new AmbariApi(this.context);
+    }
+    return ambariApi;
+  }
+
+  private String generateCreateQuery(TableInfo ti) {
+    return new QueryGenerator().generateCreateQuery(ti);
+  }
+
+  private String generateInsertFromQuery(InsertFromQueryInput input) {
+    return new QueryGenerator().generateInsertFromQuery(input);
+  }
+
+  private String generateDeleteQuery(DeleteQueryInput deleteQueryInput) {
+    return new QueryGenerator().generateDropTableQuery(deleteQueryInput);
+  }
+
+  private Job createJob(String query, String databaseName) throws Throwable{
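+    // Build an internal view job that carries the query text and target database, then submit it through the JobResourceManager.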
+    Map jobInfo = new HashMap<>();
+    jobInfo.put("title", "Internal Job");
+    jobInfo.put("forcedContent", query);
+    jobInfo.put("dataBase", databaseName);
+
+    Job job = new JobImpl(jobInfo);
+    LOG.info("creating job : {}", job);
+    getResourceManager().create(job);
+
+    JobController createdJobController = getResourceManager().readController(job.getId());
+    createdJobController.submit();
+    getResourceManager().saveIfModified(createdJobController);
+
+    return job;
+  }
+
+  private String getHiveMetaStoreLocation(String db, String table) {
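+    // Read the table's HDFS location from the 'Location:' row of the formatted table description; if it is absent, fall back to <warehouse dir>[/<db>.db]/<table>.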
+    String locationColValue = "Location:";
+    String urlString = null;
+    DDLDelegator delegator = new DDLDelegatorImpl(context, ConnectionSystem.getInstance().getActorSystem(), ConnectionSystem.getInstance().getOperationController(context));
+    List<Row> result = delegator.getTableDescriptionFormatted(ConnectionFactory.create(context), db, table);
+    for (Row row : result) {
+      if (row != null && row.getRow().length > 1 && row.getRow()[0] != null &&  row.getRow()[0].toString().trim().equals(locationColValue)) {
+        urlString = row.getRow()[1] == null ? null : row.getRow()[1].toString();
+        break;
+      }
+    }
+
+    String tablePath = null;
+    if (null != urlString) {
+      try {
+        URI uri = new URI(urlString);
+        tablePath = uri.getPath();
+      } catch (URISyntaxException e) {
+        LOG.debug("Error occurred while parsing as url : ", urlString, e);
+      }
+    } else {
+      String basePath = getHiveMetaStoreLocation();
+      if (!basePath.endsWith("/")) {
+        basePath = basePath + "/";
+      }
+      if (db != null && !db.equals(HIVE_DEFAULT_DB)) {
+        basePath = basePath + db + ".db/";
+      }
+      tablePath = basePath + table;
+    }
+
+    return tablePath + "/" + table ;
+  }
+
+  private String getHiveMetaStoreLocation() {
+    String dir = context.getProperties().get(HIVE_METASTORE_LOCATION_KEY_VIEW_PROPERTY);
+    if (dir != null && !dir.trim().isEmpty()) {
+      return dir;
+    } else {
+      LOG.debug("Neither found associated cluster nor found the view property {}. Returning default location : {}", HIVE_METASTORE_LOCATION_KEY_VIEW_PROPERTY, HIVE_DEFAULT_METASTORE_LOCATION);
+      return HIVE_DEFAULT_METASTORE_LOCATION;
+    }
+  }
+
+  private void uploadFile(final String filePath, InputStream uploadedInputStream)
+    throws IOException, InterruptedException {
+    byte[] chunk = new byte[1024];
+    FSDataOutputStream out = getSharedObjectsFactory().getHdfsApi().create(filePath, false);
+    int n = -1;
+    while ((n = uploadedInputStream.read(chunk)) != -1) {
+      out.write(chunk, 0, n);
+    }
+    out.close();
+  }
+
+  private static String getErrorMessage(WebApplicationException e) {
+    if (null != e.getResponse() && null != e.getResponse().getEntity())
+      return e.getResponse().getEntity().toString();
+    else return e.getMessage();
+  }
+
+  private PreviewData generatePreview(Boolean isFirstRowHeader, String inputFileType, CSVParams csvParams, InputStream uploadedInputStream) throws Exception {
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, inputFileType);
+    if (inputFileType.equals(ParseOptions.InputFileType.CSV.toString())){
+      if(isFirstRowHeader)
+        parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+      else
+        parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.NONE.toString());
+
+      parseOptions.setOption(ParseOptions.OPTIONS_CSV_DELIMITER, csvParams.getCsvDelimiter());
+      parseOptions.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR, csvParams.getCsvEscape());
+      parseOptions.setOption(ParseOptions.OPTIONS_CSV_QUOTE, csvParams.getCsvQuote());
+    }
+    else
+      parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString());
+
+    LOG.info("isFirstRowHeader : {}, inputFileType : {}", isFirstRowHeader, inputFileType);
+
+    DataParser dataParser = new DataParser(new InputStreamReader(uploadedInputStream), parseOptions);
+
+    return dataParser.parsePreview();
+  }
+
+  private Response createPreviewResponse(PreviewData pd, Boolean isFirstRowHeader, String tableName) {
+    Map<String, Object> retData = new HashMap<>();
+    retData.put("header", pd.getHeader());
+    retData.put("rows", pd.getPreviewRows());
+    retData.put("isFirstRowHeader", isFirstRowHeader);
+    retData.put("tableName", tableName);
+
+    JSONObject jsonObject = new JSONObject(retData);
+    return Response.ok(jsonObject).build();
+  }
+
+  private InputStream getHDFSFileStream(String path) throws IOException, InterruptedException {
+    FSDataInputStream fsStream = getSharedObjectsFactory().getHdfsApi().open(path);
+    return fsStream;
+  }
+
+  private String uploadFileFromStream(
+    InputStream uploadedInputStream,
+    Boolean isFirstRowHeader,
+    String inputFileType,   // the format of the file uploaded. CSV/JSON etc.
+    String tableName,
+    String databaseName,
+    List<ColumnDescriptionImpl> header,
+    boolean containsEndlines,
+    CSVParams csvParams
+  ) throws Exception {
+    LOG.info(" uploading file into databaseName {}, tableName {}", databaseName, tableName);
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, inputFileType);
+    if(isFirstRowHeader){
+      parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+    }else{
+      parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.NONE.toString());
+    }
+
+    if(null != csvParams){
+      parseOptions.setOption(ParseOptions.OPTIONS_CSV_DELIMITER, csvParams.getCsvDelimiter());
+      parseOptions.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR, csvParams.getCsvEscape());
+      parseOptions.setOption(ParseOptions.OPTIONS_CSV_QUOTE, csvParams.getCsvQuote());
+    }
+
+    DataParser dataParser = new DataParser(new InputStreamReader(uploadedInputStream), parseOptions);
+
+    Reader csvReader = new TableDataReader(dataParser.iterator(), header, containsEndlines); // encodes column values into HEX so that \n etc. don't appear in the Hive table data
+    String path = uploadIntoTable(csvReader, databaseName, tableName);
+    return path;
+  }
+
+  private String getBasenameFromPath(String path) {
+    String fileName = new File(path).getName();
+    return getBasename(fileName);
+  }
+
+  private String getBasename(String fileName) {
+    int index = fileName.indexOf(".");
+    if (index != -1) {
+      return fileName.substring(0, index);
+    }
+
+    return fileName;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/DataParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/DataParser.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/DataParser.java
new file mode 100644
index 0000000..5ba7a8b
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/DataParser.java
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads.parsers;
+
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.csv.opencsv.OpenCSVParser;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.json.JSONParser;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.xml.XMLParser;
+
+import java.io.Reader;
+import java.util.Iterator;
+
+/**
+ * Wrapper/Decorator over the Stream parsers.
+ * Supports XML/JSON/CSV parsing.
+ */
+public class DataParser implements IParser {
+
+  private IParser parser;
+
+  public DataParser(Reader reader, ParseOptions parseOptions) throws Exception {
+    if (parseOptions.getOption(ParseOptions.OPTIONS_FILE_TYPE).equals(ParseOptions.InputFileType.CSV.toString())) {
+      parser = new OpenCSVParser(reader, parseOptions);
+    } else if (parseOptions.getOption(ParseOptions.OPTIONS_FILE_TYPE).equals(ParseOptions.InputFileType.JSON.toString())) {
+      parser = new JSONParser(reader, parseOptions);
+    } else if (parseOptions.getOption(ParseOptions.OPTIONS_FILE_TYPE).equals(ParseOptions.InputFileType.XML.toString())) {
+      parser = new XMLParser(reader, parseOptions);
+    }
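+    // For any other file type the delegate parser stays null, so subsequent calls would fail with a NullPointerException.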
+  }
+
+  @Override
+  public PreviewData parsePreview() {
+    return parser.parsePreview();
+  }
+
+  @Override
+  public Row extractHeader() {
+    return parser.extractHeader();
+  }
+
+  @Override
+  public void close() throws Exception {
+    parser.close();
+  }
+
+  @Override
+  public Iterator<Row> iterator() {
+    return parser.iterator();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/EndOfDocumentException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/EndOfDocumentException.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/EndOfDocumentException.java
new file mode 100644
index 0000000..2128fab
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/EndOfDocumentException.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads.parsers;
+
+
+public class EndOfDocumentException extends Exception {
+  public EndOfDocumentException() {
+  }
+
+  public EndOfDocumentException(String message) {
+    super(message);
+  }
+
+  public EndOfDocumentException(String message, Throwable cause) {
+    super(message, cause);
+  }
+
+  public EndOfDocumentException(Throwable cause) {
+    super(cause);
+  }
+
+  public EndOfDocumentException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
+    super(message, cause, enableSuppression, writableStackTrace);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/IParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/IParser.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/IParser.java
new file mode 100644
index 0000000..7294d2e
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/IParser.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads.parsers;
+
+import org.apache.ambari.view.hive20.client.Row;
+
+/**
+ * Interface defining methods for parsers that can be used for generating a preview
+ * and uploading a table into Hive.
+ */
+public interface IParser extends Iterable<Row>, AutoCloseable{
+
+  PreviewData parsePreview();
+
+  Row extractHeader();
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/ParseOptions.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/ParseOptions.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/ParseOptions.java
new file mode 100644
index 0000000..cb513a3
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/ParseOptions.java
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads.parsers;
+
+import java.util.HashMap;
+
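+/**
+ * Simple bag of key/value options that control parsing: input file type, header handling,
+ * CSV delimiter/quote/escape characters and the number of preview rows.
+ */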
+public class ParseOptions {
+  public static final String OPTIONS_CSV_DELIMITER = "OPTIONS_CSV_DELIMITER";
+  public static final String OPTIONS_CSV_QUOTE = "OPTIONS_CSV_QUOTE";
+  public static final String OPTIONS_HEADERS = "OPTIONS_HEADERS";
+  public static final String OPTIONS_CSV_ESCAPE_CHAR = "OPTIONS_CSV_ESCAPE_CHAR";
+
+  public enum InputFileType {
+    CSV,
+    JSON,
+    XML
+  }
+
+  public enum HEADER {
+    FIRST_RECORD,
+    PROVIDED_BY_USER, // not used right now but can be used when some file metadata provides this information
+    EMBEDDED, // for JSON/XML and possibly other file formats where the header is embedded with the data
+    NONE   // if the file does not contain header information at all
+  }
+  final public static String OPTIONS_FILE_TYPE = "FILE_TYPE";
+  final public static String OPTIONS_HEADER = "HEADER";
+  final public static String OPTIONS_NUMBER_OF_PREVIEW_ROWS = "NUMBER_OF_PREVIEW_ROWS";
+
+  private HashMap<String, Object> options = new HashMap<>();
+
+  public void setOption(String key, Object value) {
+    this.options.put(key, value);
+  }
+
+  public Object getOption(String key) {
+    return this.options.get(key);
+  }
+
+  @Override
+  public String toString() {
+    return new StringBuilder("ParseOptions{")
+      .append("options=").append(options)
+      .append('}').toString();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/ParseUtils.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/ParseUtils.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/ParseUtils.java
new file mode 100644
index 0000000..27d03f2
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/ParseUtils.java
@@ -0,0 +1,213 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads.parsers;
+
+import org.apache.directory.api.util.Strings;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.sql.Timestamp;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.List;
+
+import static org.apache.ambari.view.hive20.client.ColumnDescription.DataTypes;
+
+public class ParseUtils {
+
+  protected final static Logger LOG =
+    LoggerFactory.getLogger(ParseUtils.class);
+
+  final public static DataTypes[] dataTypeList = {DataTypes.BOOLEAN, DataTypes.INT, DataTypes.BIGINT, DataTypes.DOUBLE, DataTypes.CHAR, DataTypes.TIMESTAMP, DataTypes.DATE, DataTypes.STRING};
+  private static final String HIVE_DATE_FORMAT = "yyyy-MM-dd";
+
+  // no strict checking required as it is done by Date parsing
+  private static final String HIVE_DATE_FORMAT_REGEX = "^[0-9]{4}-[0-9]?[0-9]-[0-9]?[0-9]$";
+
+
+  public static boolean isInteger(Object object) {
+    if (object == null)
+      return false;
+
+    if (object instanceof Integer)
+      return true;
+
+    try {
+      Integer i = Integer.parseInt(object.toString());
+      return true;
+    } catch (NumberFormatException nfe) {
+      return false;
+    }
+  }
+
+  public static boolean isBoolean(Object object) {
+    if (object == null)
+      return false;
+
+    if (object instanceof Boolean)
+      return true;
+
+    String strValue = object.toString();
+    return strValue.equalsIgnoreCase("true") || strValue.equalsIgnoreCase("false");
+  }
+
+  public static boolean isString(Object object) {
+    return object != null;
+  }
+
+  public static boolean isLong(Object object) {
+    if (object == null)
+      return false;
+
+    if (object instanceof Long)
+      return true;
+
+    try {
+      Long i = Long.parseLong(object.toString());
+      return true;
+    } catch (Exception nfe) {
+      return false;
+    }
+  }
+
+  public static boolean isDouble(Object object) {
+    if (object == null)
+      return false;
+
+    if (object instanceof Double)
+      return true;
+
+    try {
+      Double i = Double.parseDouble(object.toString());
+      return true;
+    } catch (Exception nfe) {
+      return false;
+    }
+  }
+
+  public static boolean isChar(Object object) {
+    if (object == null)
+      return false;
+
+    if (object instanceof Character)
+      return true;
+
+    String str = object.toString().trim();
+    return str.length() == 1;
+
+  }
+
+  public static boolean isDate(Object object) {
+    if (object == null)
+      return false;
+
+    if (object instanceof Date)
+      return true;
+
+    String str = object.toString();
+    if (Strings.isNotEmpty(str)) {
+      str = str.trim();
+      if (str.matches(HIVE_DATE_FORMAT_REGEX)) {
+        try {
+          SimpleDateFormat sdf = new SimpleDateFormat(HIVE_DATE_FORMAT);
+          sdf.setLenient(false);
+          Date date = sdf.parse(str);
+          return true;
+        } catch (Exception e) {
+          LOG.debug("error while parsing as date string {}, format {}", str, HIVE_DATE_FORMAT, e);
+        }
+      }
+    }
+    return false;
+  }
+
+  public static boolean isTimeStamp(Object object) {
+    if (object == null)
+      return false;
+
+    if (object instanceof Date)
+      return true;
+
+    String str = object.toString();
+    try {
+      Timestamp ts = Timestamp.valueOf(str);
+      return true;
+    } catch (Exception e) {
+      LOG.debug("error while parsing as timestamp string {}", str, e);
+    }
+
+    return false;
+  }
+
+  public static DataTypes detectHiveDataType(Object object) {
+    // detection order: BOOLEAN, INT, BIGINT, DOUBLE, CHAR, TIMESTAMP, DATE; otherwise STRING
+    if (isBoolean(object)) return DataTypes.BOOLEAN;
+    if (isInteger(object)) return DataTypes.INT;
+    if (isLong(object)) return DataTypes.BIGINT;
+    if (isDouble(object)) return DataTypes.DOUBLE;
+    if (isChar(object)) return DataTypes.CHAR;
+    if (isTimeStamp(object)) return DataTypes.TIMESTAMP;
+    if (isDate(object)) return DataTypes.DATE;
+
+    return DataTypes.STRING;
+  }
+
+  public static boolean checkDatatype( Object object, DataTypes datatype){
+    switch(datatype){
+
+      case BOOLEAN :
+        return isBoolean(object);
+      case INT :
+        return isInteger(object);
+      case BIGINT :
+        return isLong(object);
+      case DOUBLE:
+        return isDouble(object);
+      case CHAR:
+        return isChar(object);
+      case DATE:
+        return isDate(object);
+      case TIMESTAMP:
+        return isTimeStamp(object);
+      case STRING:
+        return isString(object);
+
+      default:
+        LOG.error("this datatype detection is not supported : {}", datatype);
+        return false;
+    }
+  }
+
+  public static DataTypes detectHiveColumnDataType(List<Object> colValues) {
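+    // Try the candidate types in the order of dataTypeList; return the first type that every value in the column satisfies, otherwise fall back to STRING.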
+    boolean found;
+    for(DataTypes datatype : dataTypeList){
+      found = true;
+      for(Object object : colValues){
+        if(!checkDatatype(object,datatype)){
+          found = false;
+          break;
+        }
+      }
+
+      if(found) return datatype;
+    }
+
+    return DataTypes.STRING; //default
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/Parser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/Parser.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/Parser.java
new file mode 100644
index 0000000..a012463
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/Parser.java
@@ -0,0 +1,161 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads.parsers;
+
+import org.apache.ambari.view.hive20.client.ColumnDescription;
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.Reader;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.NoSuchElementException;
+
+/**
+ * Provides the common implementation for parsing JSON, CSV and XML files
+ * to generate preview rows, headers and column types.
+ * Also provides TableDataReader for converting any type to CSV.
+ */
+public abstract class Parser implements IParser {
+
+  protected final static Logger LOG =
+    LoggerFactory.getLogger(Parser.class);
+  public static final String COLUMN_PREFIX = "column";
+
+  protected Reader reader; // same as CSV reader in this case
+  protected ParseOptions parseOptions;
+  private int numberOfPreviewRows = 10;
+
+  public Parser(Reader originalReader, ParseOptions parseOptions) {
+    this.reader = originalReader;
+    this.parseOptions = parseOptions;
+  }
+
+  /**
+   * Returns the datatype that is valid for all the values of the given column.
+   *
+   * @param rows : non empty list of rows
+   * @param colNum : to detect datatype for this column number.
+   * @return data type for that column
+   */
+  private ColumnDescription.DataTypes getLikelyDataType(List<Row> rows, int colNum) {
+    // order of detection BOOLEAN,INT,BIGINT,DOUBLE,DATE,CHAR,STRING
+    List<Object> colValues = new ArrayList<>(rows.size());
+    for( Row row : rows ){
+      colValues.add(row.getRow()[colNum]);
+    }
+
+    return ParseUtils.detectHiveColumnDataType(colValues);
+  }
+
+  @Override
+  public PreviewData parsePreview() {
+    LOG.info("generating preview for : {}", this.parseOptions );
+
+    ArrayList<Row> previewRows;
+    List<ColumnDescription> header;
+
+    try {
+      numberOfPreviewRows = (Integer) parseOptions.getOption(ParseOptions.OPTIONS_NUMBER_OF_PREVIEW_ROWS);
+    } catch (Exception e) {
+      LOG.debug("Illegal number of preview columns supplied {}",parseOptions.getOption(ParseOptions.OPTIONS_NUMBER_OF_PREVIEW_ROWS) );
+    }
+
+    int numberOfRows = numberOfPreviewRows;
+    previewRows = new ArrayList<>(numberOfPreviewRows);
+
+    Row headerRow = null;
+    Integer numOfCols = null;
+
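+    // The header is extracted only when it is part of the data (FIRST_RECORD for CSV, EMBEDDED for JSON/XML);
+    // the column count is then taken from the header, otherwise from the first data row.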
+    if (parseOptions.getOption(ParseOptions.OPTIONS_HEADER) != null &&
+      ( parseOptions.getOption(ParseOptions.OPTIONS_HEADER).equals(ParseOptions.HEADER.FIRST_RECORD.toString()) ||
+        parseOptions.getOption(ParseOptions.OPTIONS_HEADER).equals(ParseOptions.HEADER.EMBEDDED.toString())
+      )) {
+      headerRow = extractHeader();
+      numOfCols = headerRow.getRow().length;
+    }
+
+    Row r;
+    if (iterator().hasNext()) {
+      r = iterator().next();
+      if( null == numOfCols ) {
+        numOfCols = r.getRow().length;
+      }
+    } else {
+      LOG.error("No rows found in the file. returning error.");
+      throw new NoSuchElementException("No rows in the file.");
+    }
+
+    while (true) {
+      // create Header definition from row
+      Object[] values = r.getRow();
+      Object[] newValues= new Object[numOfCols]; // adds null if less columns detected and removes extra columns if any
+
+      for (int colNum = 0; colNum < numOfCols; colNum++) {
+        if(colNum < values.length) {
+          newValues[colNum] = values[colNum];
+        }else{
+          newValues[colNum] = null;
+        }
+      }
+
+      previewRows.add(new Row(newValues));
+
+      numberOfRows--;
+      if (numberOfRows <= 0 || !iterator().hasNext())
+        break;
+
+      r = iterator().next();
+    }
+
+    if (previewRows.size() <= 0) {
+      LOG.error("No rows found in the file. returning error.");
+      throw new NoSuchElementException("Does not contain any rows.");
+    }
+
+    // find data types.
+    header = generateHeader(headerRow,previewRows,numOfCols);
+
+    return new PreviewData(header,previewRows);
+  }
+
+  private List<ColumnDescription> generateHeader(Row headerRow,List<Row> previewRows, int numOfCols) {
+    List<ColumnDescription> header = new ArrayList<>();
+
+    for (int colNum = 0; colNum < numOfCols; colNum++) {
+      ColumnDescription.DataTypes type = getLikelyDataType(previewRows,colNum);
+      LOG.info("datatype detected for column {} : {}", colNum, type);
+
+      String colName = COLUMN_PREFIX + (colNum + 1);
+      if (null != headerRow)
+        colName = (String) headerRow.getRow()[colNum];
+
+      ColumnDescription cd = new ColumnDescriptionImpl(colName, type.toString(), colNum);
+      header.add(cd);
+    }
+
+    LOG.debug("return headers : {} ", header);
+    return header;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/PreviewData.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/PreviewData.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/PreviewData.java
new file mode 100644
index 0000000..50af529
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/PreviewData.java
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads.parsers;
+
+import org.apache.ambari.view.hive20.client.ColumnDescription;
+import org.apache.ambari.view.hive20.client.Row;
+
+import java.util.List;
+
+/**
+ * Encapsulating preview data from parser.
+ */
+public class PreviewData {
+  private List<ColumnDescription> header;
+  private List<Row> previewRows;
+
+  public PreviewData() {
+  }
+
+  public PreviewData(List<ColumnDescription> header, List<Row> previewRows) {
+    this.header = header;
+    this.previewRows = previewRows;
+  }
+
+  public List<ColumnDescription> getHeader() {
+    return header;
+  }
+
+  public void setHeader(List<ColumnDescription> header) {
+    this.header = header;
+  }
+
+  public List<Row> getPreviewRows() {
+    return previewRows;
+  }
+
+  public void setPreviewRows(List<Row> previewRows) {
+    this.previewRows = previewRows;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/RowIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/RowIterator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/RowIterator.java
new file mode 100644
index 0000000..80e460f
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/RowIterator.java
@@ -0,0 +1,98 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads.parsers;
+
+import org.apache.ambari.view.hive20.client.Row;
+
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.LinkedList;
+
+/**
+ * Converts the map of values created by the JSON/XML parsers into ordered values in a Row.
+ * Takes a RowMapIterator as input.
+ */
+public class RowIterator implements Iterator<Row> {
+
+  private LinkedList<String> headers = null;
+  private RowMapIterator iterator;
+
+  /**
+   * Creates a row iterator for the map values in the given RowMapIterator
+   * and keeps the keys of the first map as the header.
+   * @param iterator
+   */
+  public RowIterator(RowMapIterator iterator) {
+    this.iterator = iterator;
+    LinkedHashMap<String, String> obj = iterator.peek();
+    headers = new LinkedList<>();
+    if (null != obj) {
+      headers.addAll(obj.keySet());
+    }
+  }
+
+  @Override
+  public boolean hasNext() {
+    return iterator.hasNext();
+  }
+
+
+  @Override
+  public Row next() {
+    LinkedHashMap<String, String> r = this.iterator.next();
+    if (null == r) {
+      return null;
+    }
+
+    return convertToRow(r);
+  }
+
+  @Override
+  public void remove() {
+    iterator.remove();
+  }
+
+  /**
+   * @return : ordered collection of header strings
+   */
+  public LinkedList<String> extractHeaders() {
+    return headers;
+  }
+
+  /**
+   * Converts the map into a Row, ordering values by the extracted headers.
+   * @param lr : the row as an ordered map of column name to value
+   * @return : the converted Row
+   */
+  private Row convertToRow(LinkedHashMap<String, String> lr) {
+    Object[] data = new Object[headers.size()];
+    int i = 0;
+    for (String cd : headers) {
+      String d = lr.get(cd);
+
+      if (d != null)
+        d = d.trim(); // trim to remove any \n etc which is used as a separator for rows in TableDataReader
+
+      data[i++] = d;
+    }
+
+    return new Row(data);
+  }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/RowMapIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/RowMapIterator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/RowMapIterator.java
new file mode 100644
index 0000000..f429157
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/RowMapIterator.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads.parsers;
+
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+
+/**
+ * Iterator which generates an ordered map of column names and values for each row from streams like JSON and XML.
+ */
+public interface RowMapIterator extends Iterator<LinkedHashMap<String, String>> {
+  LinkedHashMap<String, String> peek() ;
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/csv/commonscsv/CSVIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/csv/commonscsv/CSVIterator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/csv/commonscsv/CSVIterator.java
new file mode 100644
index 0000000..039bf21
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/csv/commonscsv/CSVIterator.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads.parsers.csv.commonscsv;
+
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.commons.csv.CSVRecord;
+
+import java.util.Iterator;
+
+/**
+ * iterates over the input CSV records and generates Row objects
+ */
+class CSVIterator implements Iterator<Row> {
+
+  private Iterator<CSVRecord> iterator;
+
+  public CSVIterator(Iterator<CSVRecord> iterator) {
+    this.iterator = iterator;
+  }
+
+  @Override
+  public boolean hasNext() {
+    return iterator.hasNext();
+  }
+
+  @Override
+  public Row next() {
+    CSVRecord row = iterator.next();
+    Object[] values = new Object[row.size()];
+    for (int i = 0; i < values.length; i++) {
+      values[i] = row.get(i);
+    }
+    Row r = new Row(values);
+    return r;
+  }
+
+  @Override
+  public void remove() {
+    this.iterator.remove();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/csv/commonscsv/CSVParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/csv/commonscsv/CSVParser.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/csv/commonscsv/CSVParser.java
new file mode 100644
index 0000000..78ba77d
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/csv/commonscsv/CSVParser.java
@@ -0,0 +1,88 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.hive20.resources.uploads.parsers.csv.commonscsv;
+
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.Parser;
+import org.apache.commons.csv.CSVFormat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.Reader;
+import java.util.Iterator;
+
+/**
+ * Parses the given Reader, which contains a CSV stream, extracts headers and rows, and detects the data types of columns
+ */
+public class CSVParser extends Parser {
+  private CSVIterator iterator;
+  private org.apache.commons.csv.CSVParser parser;
+  private final static Logger LOG =
+    LoggerFactory.getLogger(CSVParser.class);
+
+  public CSVParser(Reader reader, ParseOptions parseOptions) throws IOException {
+    super(reader, parseOptions);
+    CSVFormat format = CSVFormat.DEFAULT;
+    String optHeader =  (String)parseOptions.getOption(ParseOptions.OPTIONS_HEADER);
+    if(optHeader != null){
+      if(optHeader.equals(ParseOptions.HEADER.FIRST_RECORD.toString())) {
+        format = format.withHeader();
+      }else if( optHeader.equals(ParseOptions.HEADER.PROVIDED_BY_USER.toString())){
+        String [] headers = (String[]) parseOptions.getOption(ParseOptions.OPTIONS_HEADERS);
+        format = format.withHeader(headers);
+      }
+    }
+
+    Character delimiter = (Character) parseOptions.getOption(ParseOptions.OPTIONS_CSV_DELIMITER);
+    if(delimiter != null){
+      LOG.info("setting delimiter as {}", delimiter);
+      format = format.withDelimiter(delimiter);
+    }
+
+    Character quote = (Character) parseOptions.getOption(ParseOptions.OPTIONS_CSV_QUOTE);
+    if( null != quote ){
+      LOG.info("setting Quote char : {}", quote);
+      format = format.withQuote(quote);
+    }
+
+    Character escape = (Character) parseOptions.getOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR);
+    if(escape != null){
+      LOG.info("setting escape as {}", escape);
+      format = format.withEscape(escape);
+    }
+
+    parser = new org.apache.commons.csv.CSVParser(this.reader, format);
+    iterator = new CSVIterator(parser.iterator());
+  }
+
+  @Override
+  public Row extractHeader() {
+    return new Row(parser.getHeaderMap().keySet().toArray());
+  }
+
+  @Override
+  public void close() throws Exception {
+    this.parser.close();
+  }
+
+  public Iterator<Row> iterator() {
+    return iterator; // only one iterator per parser.
+  }
+}
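
A minimal usage sketch of the commons-csv based parser above, not part of this patch. It assumes ParseOptions has a public no-arg constructor and a setOption(String, Object) counterpart to the getOption() calls in the constructor; neither is shown in this diff. The option names and the FIRST_RECORD header mode are taken from the code above.

import org.apache.ambari.view.hive20.client.Row;
import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseOptions;
import org.apache.ambari.view.hive20.resources.uploads.parsers.csv.commonscsv.CSVParser;

import java.io.StringReader;
import java.util.Iterator;

public class CsvParserUsageSketch {
  public static void main(String[] args) throws Exception {
    String csv = "id,name\n1,alice\n2,bob\n";

    ParseOptions options = new ParseOptions();                    // assumed no-arg constructor
    options.setOption(ParseOptions.OPTIONS_HEADER,                // assumed setter mirroring getOption()
        ParseOptions.HEADER.FIRST_RECORD.toString());
    options.setOption(ParseOptions.OPTIONS_CSV_DELIMITER, ',');   // Character option read by the constructor

    CSVParser parser = new CSVParser(new StringReader(csv), options);
    try {
      System.out.println("header: " + parser.extractHeader());    // Row built from parser.getHeaderMap()
      for (Iterator<Row> rows = parser.iterator(); rows.hasNext(); ) {
        System.out.println(rows.next());                          // relies on Row.toString() for display
      }
    } finally {
      parser.close();
    }
  }
}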

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/csv/opencsv/OpenCSVIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/csv/opencsv/OpenCSVIterator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/csv/opencsv/OpenCSVIterator.java
new file mode 100644
index 0000000..16550c2
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/csv/opencsv/OpenCSVIterator.java
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads.parsers.csv.opencsv;
+
+import org.apache.ambari.view.hive20.client.Row;
+
+import java.util.Iterator;
+
+/**
+ * iterates over the input CSV records and generates Row objects
+ */
+class OpenCSVIterator implements Iterator<Row> {
+
+  private Iterator<String[]> iterator;
+
+  public OpenCSVIterator(Iterator<String[]> iterator) {
+    this.iterator = iterator;
+  }
+
+  @Override
+  public boolean hasNext() {
+    return iterator.hasNext();
+  }
+
+  @Override
+  public Row next() {
+    String[] row = iterator.next();
+    Object[] values = new Object[row.length];
+    for (int i = 0; i < values.length; i++) {
+      values[i] = row[i];
+    }
+    Row r = new Row(values);
+    return r;
+  }
+
+  @Override
+  public void remove() {
+    this.iterator.remove();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/csv/opencsv/OpenCSVParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/csv/opencsv/OpenCSVParser.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/csv/opencsv/OpenCSVParser.java
new file mode 100644
index 0000000..ef89615
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/csv/opencsv/OpenCSVParser.java
@@ -0,0 +1,92 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.hive20.resources.uploads.parsers.csv.opencsv;
+
+import com.opencsv.CSVParserBuilder;
+import com.opencsv.CSVReader;
+import com.opencsv.CSVReaderBuilder;
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.Parser;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.Reader;
+import java.util.Iterator;
+
+/**
+ * Parses the given Reader, which contains a CSV stream, and extracts headers and rows
+ */
+public class OpenCSVParser extends Parser {
+  private Row headerRow;
+  private OpenCSVIterator iterator;
+  private CSVReader csvReader = null;
+  private final static Logger LOG =
+    LoggerFactory.getLogger(OpenCSVParser.class);
+
+  public OpenCSVParser(Reader reader, ParseOptions parseOptions) throws IOException {
+    super(reader, parseOptions);
+    CSVParserBuilder csvParserBuilder = new CSVParserBuilder();
+    CSVReaderBuilder builder =  new CSVReaderBuilder(reader);
+
+    Character delimiter = (Character) parseOptions.getOption(ParseOptions.OPTIONS_CSV_DELIMITER);
+    if(delimiter != null){
+      LOG.info("setting delimiter as {}", delimiter);
+      csvParserBuilder = csvParserBuilder.withSeparator(delimiter);
+    }
+
+    Character quote = (Character) parseOptions.getOption(ParseOptions.OPTIONS_CSV_QUOTE);
+    if( null != quote ){
+      LOG.info("setting Quote char : {}", quote);
+      csvParserBuilder = csvParserBuilder.withQuoteChar(quote);
+    }
+
+    Character escapeChar = (Character) parseOptions.getOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR);
+    if( null != escapeChar ){
+      LOG.info("setting escapeChar : {}", escapeChar);
+      csvParserBuilder = csvParserBuilder.withEscapeChar(escapeChar);
+    }
+
+    builder.withCSVParser(csvParserBuilder.build());
+    this.csvReader = builder.build();
+    iterator = new OpenCSVIterator(this.csvReader.iterator());
+
+    String optHeader =  (String)parseOptions.getOption(ParseOptions.OPTIONS_HEADER);
+    if(optHeader != null){
+      if(optHeader.equals(ParseOptions.HEADER.FIRST_RECORD.toString())) {
+        this.headerRow = iterator().hasNext() ? iterator.next() : new Row(new Object[]{});
+      }
+    }
+
+  }
+
+  @Override
+  public Row extractHeader() {
+    return headerRow;
+  }
+
+  @Override
+  public void close() throws Exception {
+    this.csvReader.close();
+  }
+
+  public Iterator<Row> iterator() {
+    return iterator; // only one iterator per parser.
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/json/JSONIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/json/JSONIterator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/json/JSONIterator.java
new file mode 100644
index 0000000..d46602a
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/json/JSONIterator.java
@@ -0,0 +1,160 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads.parsers.json;
+
+import com.google.gson.stream.JsonReader;
+import com.google.gson.stream.JsonToken;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.EndOfDocumentException;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.RowMapIterator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.LinkedHashMap;
+
+/**
+ * iterates over the JsonReader and creates row data.
+ * assumes the document is an array of JSON objects.
+ * eg : [ { "col1Name" : "value-1-1", "col2Name" : "value-1-2"}, { "col1Name" : "value-2-1", "col2Name" : "value-2-2"}]
+ */
+class JSONIterator implements RowMapIterator {
+
+  protected final static Logger LOG =
+          LoggerFactory.getLogger(JSONIterator.class);
+
+  private LinkedHashMap<String, String> nextObject = null;
+
+  private LinkedHashMap<String, String> readNextObject(JsonReader reader) throws IOException, EndOfDocumentException {
+    LinkedHashMap<String, String> row = new LinkedHashMap<>();
+    boolean objectStarted = false;
+    boolean shouldBeName = false;
+    String currentName = null;
+
+    while (true) {
+      JsonToken token = reader.peek();
+      switch (token) {
+        case BEGIN_ARRAY:
+          throw new IllegalArgumentException("Row data cannot have an array.");
+        case END_ARRAY:
+          throw new EndOfDocumentException("End of Json Array document.");
+        case BEGIN_OBJECT:
+          if (objectStarted == true) {
+            throw new IllegalArgumentException("Nested objects not supported.");
+          }
+          if (shouldBeName == true) {
+            throw new IllegalArgumentException("name expected, got begin_object");
+          }
+          objectStarted = true;
+          shouldBeName = true;
+          reader.beginObject();
+          break;
+        case END_OBJECT:
+          if (shouldBeName == false) {
+            throw new IllegalArgumentException("value expected, got end_object");
+          }
+          reader.endObject();
+          return row;
+        case NAME:
+          if (shouldBeName == false) {
+            throw new IllegalArgumentException("name not expected at this point.");
+          }
+          shouldBeName = false;
+          currentName = reader.nextName();
+          break;
+        case NUMBER:
+        case STRING:
+          if (shouldBeName == true) {
+            throw new IllegalArgumentException("value not expected at this point.");
+          }
+          String n = reader.nextString();
+          row.put(currentName, n);
+          shouldBeName = true;
+          break;
+        case BOOLEAN:
+          if (shouldBeName == true) {
+            throw new IllegalArgumentException("value not expected at this point.");
+          }
+          String b = String.valueOf(reader.nextBoolean());
+          row.put(currentName, b);
+          shouldBeName = true;
+          break;
+        case NULL:
+          if (shouldBeName == true) {
+            throw new IllegalArgumentException("value not expected at this point.");
+          }
+          reader.nextNull();
+          row.put(currentName, "");
+          shouldBeName = true;
+          break;
+        case END_DOCUMENT:
+          return row;
+
+        default:
+          throw new IllegalArgumentException("Illegal token detected inside json: token : " + token.toString());
+      }
+    }
+  }
+
+  private JsonReader reader;
+
+  public JSONIterator(JsonReader reader) throws IOException {
+    this.reader = reader;
+    // test the start of array
+    JsonToken jt = reader.peek();
+    if (jt != JsonToken.BEGIN_ARRAY) {
+      throw new IllegalArgumentException("Expected the whole document to contain a single JsonArray.");
+    }
+
+    reader.beginArray(); // read the start of array
+    try {
+      nextObject = readNextObject(this.reader);
+    } catch (EndOfDocumentException e) {
+      // the JSON array is empty; leave nextObject as null so hasNext() returns false
+    }
+  }
+
+  @Override
+  public boolean hasNext() {
+    return null != nextObject;
+  }
+
+  public LinkedHashMap<String, String> peek() {
+    return nextObject;
+  }
+
+  @Override
+  public LinkedHashMap<String, String> next() {
+    LinkedHashMap<String, String> currObject = nextObject;
+    try {
+      nextObject = readNextObject(this.reader);
+    } catch (EndOfDocumentException e) {
+      LOG.debug("End of Json document reached with next character ending the JSON Array.");
+      nextObject = null;
+    } catch (Exception e){
+      // for any other exception throw error right away
+      throw new IllegalArgumentException(e);
+    }
+    return currObject;
+  }
+
+  @Override
+  public void remove() {
+    // no operation.
+    LOG.info("No operation when remove called on JSONIterator.");
+  }
+}
\ No newline at end of file
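
A standalone sketch (not part of this patch) of the happy path taken by readNextObject(): one flat JSON object from the array is turned into an ordered map of column name to string value. Error handling, booleans, and the rejection of nested structures are omitted here; the iterator above covers them.

import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonToken;

import java.io.StringReader;
import java.util.LinkedHashMap;

// Hypothetical, self-contained illustration of how one flat JSON object becomes a row map.
public class FlatObjectReadSketch {
  public static void main(String[] args) throws Exception {
    JsonReader reader = new JsonReader(new StringReader("[{\"col1\":\"a\",\"col2\":4.4,\"col3\":null}]"));
    reader.beginArray();                        // the whole document must be a single JSON array

    LinkedHashMap<String, String> row = new LinkedHashMap<>();
    reader.beginObject();
    while (reader.peek() != JsonToken.END_OBJECT) {
      String name = reader.nextName();
      if (reader.peek() == JsonToken.NULL) {    // nulls become empty strings, as in JSONIterator
        reader.nextNull();
        row.put(name, "");
      } else {
        row.put(name, reader.nextString());     // nextString() also coerces numbers to strings
      }
    }
    reader.endObject();
    reader.close();

    System.out.println(row);                    // prints {col1=a, col2=4.4, col3=}
  }
}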

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/json/JSONParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/json/JSONParser.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/json/JSONParser.java
new file mode 100644
index 0000000..58dae9e
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/json/JSONParser.java
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads.parsers.json;
+
+import com.google.gson.stream.JsonReader;
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.Parser;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.RowIterator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.Reader;
+import java.util.Collection;
+import java.util.Iterator;
+
+
+/**
+ * Parses the input data from reader as JSON and provides iterator for rows.
+ *
+ * Expects the input reader to contain a JsonArray in which each element is a JsonObject
+ * corresponding to a row.
+ * eg. :
+ *
+ * [
+ *  {row1-col1, row1-col2, row1-col3},
+ *  {row2-col1, row2-col2, row2-col3}
+ * ]
+ *
+ */
+public class JSONParser extends Parser {
+
+  protected final static Logger LOG =
+          LoggerFactory.getLogger(JSONParser.class);
+
+  private RowIterator iterator;
+  private JsonReader jsonReader;
+  private JSONIterator jsonIterator;
+
+  public JSONParser(Reader reader, ParseOptions parseOptions) throws IOException {
+    super(reader, parseOptions);
+    this.jsonReader = new JsonReader(this.reader);
+    jsonIterator = new JSONIterator(this.jsonReader);
+    iterator = new RowIterator(jsonIterator);
+  }
+
+  @Override
+  public Row extractHeader() {
+    Collection<String> headers = this.iterator.extractHeaders();
+    Object[] objs = new Object[headers.size()];
+    Iterator<String> iterator = headers.iterator();
+    for(int i = 0 ; i < headers.size() ; i++){
+      objs[i] = iterator.next();
+    }
+
+    return new Row(objs);
+  }
+
+  @Override
+  public void close() throws Exception {
+    this.jsonReader.close();
+  }
+
+  @Override
+  public Iterator<Row> iterator() {
+    return iterator;
+  }
+}
\ No newline at end of file
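
A minimal end-to-end usage sketch of the parser above, not part of this patch. It assumes ParseOptions has a public no-arg constructor, which is not shown in this diff; JSONParser itself reads no options.

import org.apache.ambari.view.hive20.client.Row;
import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseOptions;
import org.apache.ambari.view.hive20.resources.uploads.parsers.json.JSONParser;

import java.io.StringReader;
import java.util.Iterator;

public class JsonParserUsageSketch {
  public static void main(String[] args) throws Exception {
    String json = "[{\"col1\":\"a\",\"col2\":\"1\"},{\"col1\":\"b\",\"col2\":\"2\"}]";

    JSONParser parser = new JSONParser(new StringReader(json), new ParseOptions()); // assumed no-arg ctor
    try {
      System.out.println("header: " + parser.extractHeader()); // column names come from the first object
      for (Iterator<Row> rows = parser.iterator(); rows.hasNext(); ) {
        System.out.println(rows.next());
      }
    } finally {
      parser.close();
    }
  }
}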

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/xml/XMLIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/xml/XMLIterator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/xml/XMLIterator.java
new file mode 100644
index 0000000..c969b69
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/xml/XMLIterator.java
@@ -0,0 +1,195 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads.parsers.xml;
+
+import org.apache.ambari.view.hive20.resources.uploads.parsers.EndOfDocumentException;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.RowMapIterator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.xml.namespace.QName;
+import javax.xml.stream.XMLEventReader;
+import javax.xml.stream.XMLStreamConstants;
+import javax.xml.stream.XMLStreamException;
+import javax.xml.stream.events.*;
+import java.io.IOException;
+import java.util.LinkedHashMap;
+
+/**
+ * assumes XML of following format
+ * <table>
+ * <row>
+ * <col name="col1Name">row1-col1-Data</col>
+ * <col name="col2Name">row1-col2-Data</col>
+ * <col name="col3Name">row1-col3-Data</col>
+ * <col name="col4Name">row1-col4-Data</col>
+ * </row>
+ * <row>
+ * <col name="col1Name">row2-col1-Data</col>
+ * <col name="col2Name">row2-col2-Data</col>
+ * <col name="col3Name">row2-col3-Data</col>
+ * <col name="col4Name">row2-col4-Data</col>
+ * </row>
+ * </table>
+ */
+class XMLIterator implements RowMapIterator {
+
+  protected final static Logger LOG =
+          LoggerFactory.getLogger(XMLIterator.class);
+
+  private LinkedHashMap<String, String> nextObject = null;
+  private static final String TAG_TABLE = "table";
+  private static final String TAG_ROW = "row";
+  private static final String TAG_COL = "col";
+  private boolean documentStarted = false;
+  private XMLEventReader reader;
+
+  public XMLIterator(XMLEventReader reader) throws IOException {
+    this.reader = reader;
+    try {
+      nextObject = readNextObject(this.reader);
+    } catch (EndOfDocumentException e) {
+      LOG.debug("error : {}", e);
+    } catch (XMLStreamException e) {
+      throw new IOException(e);
+    }
+  }
+
+  @Override
+  public boolean hasNext() {
+    return null != nextObject;
+  }
+
+  public LinkedHashMap<String, String> peek() {
+    return nextObject;
+  }
+
+  @Override
+  public LinkedHashMap<String, String> next() {
+    LinkedHashMap<String, String> currObject = nextObject;
+    try {
+      nextObject = readNextObject(this.reader);
+    } catch (IOException e) {
+      LOG.error("Exception occured while reading the next row from XML : {} ", e);
+      nextObject = null;
+    } catch (EndOfDocumentException e) {
+      LOG.debug("End of XML document reached with next character ending the XML.");
+      nextObject = null;
+    } catch (XMLStreamException e) {
+      LOG.error("Exception occured while reading the next row from XML : {} ", e);
+      nextObject = null;
+    }
+    return currObject;
+  }
+
+  @Override
+  public void remove() {
+    // no operation.
+    LOG.info("No operation when remove called.");
+  }
+
+  private LinkedHashMap<String, String> readNextObject(XMLEventReader reader) throws IOException, EndOfDocumentException, XMLStreamException {
+    LinkedHashMap<String, String> row = new LinkedHashMap<>();
+    boolean objectStarted = false;
+    String currentName = null;
+
+    while (true) {
+      XMLEvent event = reader.nextEvent();
+      switch (event.getEventType()) {
+        case XMLStreamConstants.START_ELEMENT:
+          StartElement startElement = event.asStartElement();
+          String qName = startElement.getName().getLocalPart();
+          LOG.debug("startName : {}" , qName);
+          switch (qName) {
+            case TAG_TABLE:
+              if (documentStarted) {
+                throw new IllegalArgumentException("Cannot have a <table> tag nested inside another <table> tag");
+              } else {
+                documentStarted = true;
+              }
+              break;
+            case TAG_ROW:
+              if (objectStarted) {
+                throw new IllegalArgumentException("Cannot have a <row> tag nested inside another <row> tag");
+              } else {
+                objectStarted = true;
+              }
+              break;
+            case TAG_COL:
+              if (!objectStarted) {
+                throw new IllegalArgumentException("Stray tag " + qName);
+              }
+              Attribute nameAttr = startElement.getAttributeByName( new QName("name"));
+              if( null == nameAttr ){
+                throw new IllegalArgumentException("Missing name attribute in col tag.");
+              }
+              currentName = nameAttr.getValue();
+              break;
+            default:
+              throw new IllegalArgumentException("Illegal start tag " + qName + " encountered.");
+          }
+          break;
+        case XMLStreamConstants.END_ELEMENT:
+          EndElement endElement = event.asEndElement();
+          String name = endElement.getName().getLocalPart();
+          LOG.debug("endName : {}", name);
+          switch (name) {
+            case TAG_TABLE:
+              if (!documentStarted) {
+                throw new IllegalArgumentException("Stray </table> tag.");
+              }
+              throw new EndOfDocumentException("End of XML document.");
+
+            case TAG_ROW:
+              if (!objectStarted) {
+                throw new IllegalArgumentException("Stray </row> tag.");
+              }
+              return row;
+
+            case TAG_COL:
+              if (!objectStarted) {
+                throw new IllegalArgumentException("Stray tag " + name);
+              }
+              currentName = null;
+              break;
+
+            default:
+              throw new IllegalArgumentException("Illegal start ending " + name + " encountered.");
+          }
+          break;
+        case XMLStreamConstants.CHARACTERS:
+          Characters characters = event.asCharacters();
+          if (characters.isWhiteSpace() && currentName == null)
+            break;
+          String data = characters.getData();
+          LOG.debug("character data : {}", data);
+          if (currentName == null) {
+            throw new IllegalArgumentException("Illegal characters outside any tag : " + data);
+          } else {
+            String oldData = row.get(currentName);
+            if (null != oldData) {
+              data = oldData + data;
+            }
+            row.put(currentName, data);
+          }
+          break;
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/xml/XMLParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/xml/XMLParser.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/xml/XMLParser.java
new file mode 100644
index 0000000..abcf2c0
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/xml/XMLParser.java
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads.parsers.xml;
+
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.Parser;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.RowIterator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.xml.stream.XMLEventReader;
+import javax.xml.stream.XMLInputFactory;
+import javax.xml.stream.XMLStreamException;
+import java.io.IOException;
+import java.io.Reader;
+import java.util.Collection;
+import java.util.Iterator;
+
+/**
+ * assumes XML of following format
+ * <table>
+ * <row>
+ * <col name="col1Name">row1-col1-Data</col>
+ * <col name="col2Name">row1-col2-Data</col>
+ * <col name="col3Name">row1-col3-Data</col>
+ * <col name="col4Name">row1-col4-Data</col>
+ * </row>
+ * <row>
+ * <col name="col1Name">row2-col1-Data</col>
+ * <col name="col2Name">row2-col2-Data</col>
+ * <col name="col3Name">row2-col3-Data</col>
+ * <col name="col4Name">row2-col4-Data</col>
+ * </row>
+ * </table>
+ */
+public class XMLParser extends Parser {
+
+  protected final static Logger LOG =
+          LoggerFactory.getLogger(XMLParser.class);
+
+  private RowIterator iterator;
+  private XMLEventReader xmlReader;
+  private XMLIterator xmlIterator;
+
+  public XMLParser(Reader reader, ParseOptions parseOptions) throws IOException {
+    super(reader, parseOptions);
+    XMLInputFactory factory = XMLInputFactory.newInstance();
+    try {
+      this.xmlReader = factory.createXMLEventReader(reader);
+    } catch (XMLStreamException e) {
+      LOG.error("error occurred while creating xml reader : ", e);
+      throw new IOException("error occurred while creating xml reader : ", e);
+    }
+    xmlIterator = new XMLIterator(this.xmlReader);
+    iterator = new RowIterator(xmlIterator);
+  }
+
+  @Override
+  public Row extractHeader() {
+    Collection<String> headers = this.iterator.extractHeaders();
+    Object[] objs = new Object[headers.size()];
+    Iterator<String> iterator = headers.iterator();
+    for (int i = 0; i < headers.size(); i++) {
+      objs[i] = iterator.next();
+    }
+
+    return new Row(objs);
+  }
+
+  @Override
+  public void close() throws Exception {
+    try {
+      this.xmlReader.close();
+    } catch (XMLStreamException e) {
+      throw new IOException(e);
+    }
+  }
+
+  @Override
+  public Iterator<Row> iterator() {
+    return iterator;
+  }
+}
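
A minimal usage sketch of the XML parser above, not part of this patch, using the <table>/<row>/<col> format described in the class comment. As with the JSON sketch, a no-arg ParseOptions constructor is assumed.

import org.apache.ambari.view.hive20.client.Row;
import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseOptions;
import org.apache.ambari.view.hive20.resources.uploads.parsers.xml.XMLParser;

import java.io.StringReader;
import java.util.Iterator;

public class XmlParserUsageSketch {
  public static void main(String[] args) throws Exception {
    String xml = "<table>"
        + "<row><col name=\"col1\">a</col><col name=\"col2\">1</col></row>"
        + "<row><col name=\"col1\">b</col><col name=\"col2\">2</col></row>"
        + "</table>";

    XMLParser parser = new XMLParser(new StringReader(xml), new ParseOptions()); // assumed no-arg ctor
    try {
      System.out.println("header: " + parser.extractHeader()); // col1, col2 from the first <row>
      for (Iterator<Row> rows = parser.iterator(); rows.hasNext(); ) {
        System.out.println(rows.next());
      }
    } finally {
      parser.close();
    }
  }
}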


[04/20] ambari git commit: AMBARI-19321 : Hive View 2.0 - Minimal view for Hive which includes new UI changes. Also made changes in poms as required (nitirajrathore)

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-300.svg
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-300.svg b/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-300.svg
new file mode 100644
index 0000000..52b2832
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-300.svg
@@ -0,0 +1,314 @@
+<?xml version="1.0" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<svg xmlns="http://www.w3.org/2000/svg">
+<defs >
+<font id="Roboto" horiz-adv-x="1137" ><font-face
+    font-family="Roboto Light"
+    units-per-em="2048"
+    panose-1="2 0 0 0 0 0 0 0 0 0"
+    ascent="1900"
+    descent="-500"
+    alphabetic="0" />
+<glyph unicode=" " horiz-adv-x="498" />
+<glyph unicode="!" horiz-adv-x="462" d="M284 405H173L167 1456H291L284 405ZM153 70Q153 104 175 127T235 151T295 128T318 70Q318 37 296 15T235 -8T175 14T153 70Z" />
+<glyph unicode="&quot;" horiz-adv-x="588" d="M243 1396L223 1083H143L146 1536H243V1396ZM479 1396L459 1083H378L382 1536H479V1396Z" />
+<glyph unicode="#" horiz-adv-x="1191" d="M753 410H439L362 0H263L340 410H85V503H357L440 944H161V1040H458L537 1456H636L557 1040H872L951 1456H1051L972 1040H1201V944H954L871 503H1126V410H853L776 0H676L753 410ZM456 503H771L854 944H539L456 503Z" />
+<glyph unicode="$" horiz-adv-x="1135" d="M901 359Q901 470 829 540T575 674Q349 745 258 842T167 1095Q167 1258 267 1359T539 1475V1677H641V1475Q817 1459 913 1343T1010 1028H891Q891 1185 810 1277T587 1370Q445 1370 366 1296T286 1097Q286 977 359 910T607
+783T862 669T981 540T1021 361Q1021 197 919 97T637 -18V-208H536V-19Q335 -6 225 107T115 418H235Q235 262 326 174T580 85Q722 85 811 161T901 359Z" />
+<glyph unicode="%" horiz-adv-x="1513" d="M109 1176Q109 1306 189 1391T394 1477T598 1392T679 1170V1099Q679 971 600 886T396 800Q273 800 191 884T109 1106V1176ZM206 1099Q206 1006 257 946T396 886Q481 886 531 946T582 1103V1176Q582 1269 530 1329T394
+1390Q311 1390 259 1330T206 1170V1099ZM842 357Q842 487 922 572T1126 657T1330 573T1412 350V279Q1412 149 1332 64T1128 -21T924 63T842 284V357ZM938 279Q938 185 989 125T1128 65Q1214 65 1264 125T1315 284V357Q1315 453 1264 511T1126 570Q1042 570 990
+511T938 353V279ZM434 121L359 169L1070 1307L1145 1259L434 121Z" />
+<glyph unicode="&amp;" horiz-adv-x="1260" d="M404 794Q317 899 278 981T238 1145Q238 1298 329 1387T573 1476Q712 1476 798 1396T884 1191Q884 1047 718 908L558 784L958 318Q1049 465 1049 651H1160Q1160 403 1032 232L1231 0H1087L961 146Q882 68 779 24T560
+-20Q352 -20 230 86T108 371Q108 477 170 571T390 784L404 794ZM560 81Q651 81 736 119T890 229L483 701L469 716L423 681Q227 521 227 371Q227 240 317 161T560 81ZM358 1149Q358 1027 493 861L624 961Q688 1007 729 1062T770 1191Q770 1269 716 1321T572 1374Q474
+1374 416 1311T358 1149Z" />
+<glyph unicode="&apos;" horiz-adv-x="348" d="M226 1395L209 1090H119Q124 1386 124 1536H226V1395Z" />
+<glyph unicode="(" horiz-adv-x="653" d="M140 588Q140 806 196 1011T360 1387T592 1632L621 1551Q555 1504 490 1414T374 1200T292 922T260 571Q260 362 307 169T438 -171T621 -393L592 -470Q465 -394 357 -225T195 148T140 588Z" />
+<glyph unicode=")" horiz-adv-x="667" d="M514 573Q514 353 460 150T298 -223T62 -470L33 -393Q131 -323 214 -176T346 166T394 591Q394 798 346 990T214 1334T33 1555L62 1632Q188 1555 295 1386T458 1011T514 573Z" />
+<glyph unicode="*" horiz-adv-x="869" d="M361 1000L29 1108L61 1209L393 1086L389 1456H493L485 1083L809 1210L842 1109L509 994L732 700L647 637L433 942L229 639L144 700L361 1000Z" />
+<glyph unicode="+" horiz-adv-x="1156" d="M630 740H1073V628H630V146H509V628H75V740H509V1206H630V740Z" />
+<glyph unicode="," horiz-adv-x="392" d="M131 -272L60 -220Q151 -98 154 33V188H271V63Q271 -145 131 -272Z" />
+<glyph unicode="-" horiz-adv-x="586" d="M528 592H49V693H528V592Z" />
+<glyph unicode="." horiz-adv-x="489" d="M145 72Q145 107 167 131T230 156T293 132T316 72T293 15T230 -8T168 14T145 72Z" />
+<glyph unicode="/" horiz-adv-x="813" d="M139 -125H30L638 1456H746L139 -125Z" />
+<glyph unicode="0" horiz-adv-x="1135" d="M1015 607Q1015 299 902 140T569 -20Q353 -20 238 136T120 592V853Q120 1160 234 1318T567 1476Q783 1476 897 1324T1015 874V607ZM895 868Q895 1118 814 1246T567 1374Q405 1374 323 1249T239 880V594Q239 345 323 213T569
+81Q729 81 811 210T895 588V868Z" />
+<glyph unicode="1" horiz-adv-x="1135" d="M694 0H574V1312L178 1165V1277L674 1461H694V0Z" />
+<glyph unicode="2" horiz-adv-x="1135" d="M1049 0H137V92L636 658Q760 801 808 894T856 1075Q856 1213 775 1293T552 1374Q405 1374 315 1280T224 1036H105Q105 1159 160 1260T318 1418T552 1476Q752 1476 864 1371T977 1085Q977 983 914 862T690 560L284 101H1049V0Z" />
+<glyph unicode="3" horiz-adv-x="1135" d="M403 793H527Q630 793 707 829T824 929T865 1076Q865 1216 786 1295T559 1374Q419 1374 330 1292T240 1074H120Q120 1187 177 1280T335 1425T559 1476Q757 1476 871 1368T985 1072Q985 967 919 879T736 746Q872 708 942
+616T1012 395Q1012 208 890 94T564 -20Q434 -20 326 32T158 177T98 395H218Q218 256 315 169T564 81Q719 81 805 160T892 391Q892 537 799 614T523 691H403V793Z" />
+<glyph unicode="4" horiz-adv-x="1135" d="M872 469H1099V368H872V0H752V368H67V436L741 1456H872V469ZM214 469H752V1301L699 1209L214 469Z" />
+<glyph unicode="5" horiz-adv-x="1135" d="M218 746L289 1456H1017V1345H392L341 853Q458 933 615 933Q812 933 929 805T1046 464Q1046 234 932 107T611 -20Q421 -20 303 86T168 383H283Q300 234 384 158T611 81Q767 81 846 180T926 462Q926 622 837 723T594 824Q509
+824 446 803T313 719L218 746Z" />
+<glyph unicode="6" horiz-adv-x="1135" d="M843 1467V1362H829Q568 1362 418 1209T252 782Q312 865 405 910T613 956Q805 956 918 824T1032 477Q1032 335 979 221T827 44T601 -20Q392 -20 261 131T130 523V643Q130 1034 308 1248T813 1467H843ZM594 853Q480 853
+382 786T250 614V512Q250 322 347 202T601 82Q741 82 827 193T914 473Q914 645 828 749T594 853Z" />
+<glyph unicode="7" horiz-adv-x="1135" d="M1034 1387L412 0H287L905 1354H77V1456H1034V1387Z" />
+<glyph unicode="8" horiz-adv-x="1135" d="M995 1081Q995 968 929 879T755 747Q881 704 957 608T1033 386Q1033 199 906 90T570 -20Q359 -20 233 89T106 386Q106 510 179 607T379 747Q271 789 207 878T143 1081Q143 1262 259 1369T568 1476T877 1368T995 1081ZM913
+385Q913 521 816 608T568 696T321 610T225 385T318 164T570 81Q725 81 819 163T913 385ZM875 1082Q875 1207 789 1290T568 1374Q432 1374 348 1294T263 1082Q263 954 347 876T569 798Q704 798 789 876T875 1082Z" />
+<glyph unicode="9" horiz-adv-x="1135" d="M884 674Q820 580 725 529T519 477Q395 477 300 541T153 718T101 965Q101 1109 156 1227T311 1410T541 1476Q760 1476 882 1323T1004 887V779Q1004 385 836 187T323 -11H301L302 93H344Q605 97 741 241T884 674ZM534
+580Q654 580 749 651T885 837V906Q885 1128 793 1250T543 1373Q401 1373 310 1259T219 970Q219 803 306 692T534 580Z" />
+<glyph unicode=":" horiz-adv-x="430" d="M383 72Q383 107 405 131T468 156T531 132T554 72T531 15T468 -8T406 14T383 72ZM129 995Q129 1030 151 1054T214 1079T277 1055T300 995T277 938T214 915T152 937T129 995Z" />
+<glyph unicode=";" horiz-adv-x="399" d="M118 995Q118 1030 140 1054T203 1079T266 1055T289 995T266 938T203 915T141 937T118 995ZM131 -272L60 -220Q151 -98 154 33V188H271V63Q271 -145 131 -272Z" />
+<glyph unicode="&lt;" horiz-adv-x="1047" d="M208 655L904 355V229L77 608V705L904 1083V957L208 655Z" />
+<glyph unicode="=" horiz-adv-x="1133" d="M983 829H149V935H983V829ZM983 418H149V524H983V418Z" />
+<glyph unicode="&gt;" horiz-adv-x="1061" d="M835 659L124 962V1085L969 707V610L124 231V355L835 659Z" />
+<glyph unicode="?" horiz-adv-x="930" d="M376 404Q378 522 408 594T537 763T664 901T708 990T724 1101Q724 1226 658 1297T472 1369Q352 1369 279 1301T203 1115H84Q86 1279 195 1377T472 1476Q644 1476 743 1376T843 1103Q843 995 794 901T608 680Q495 585 495
+404H376ZM360 70Q360 104 381 127T442 151Q480 151 502 128T525 70Q525 37 503 15T442 -8Q403 -8 382 14T360 70Z" />
+<glyph unicode="@" horiz-adv-x="1870" d="M1754 513Q1749 366 1700 241T1565 48T1364 -20Q1267 -20 1206 31T1125 174Q1017 -20 827 -20Q687 -20 618 101T567 427Q582 590 641 717T796 916T1001 988Q1078 988 1136 967T1271 880L1220 310Q1210 194 1249 130T1376
+66Q1499 66 1575 186T1661 513Q1680 918 1507 1122T983 1327Q772 1327 603 1222T335 923T225 478T291 35T528 -260T906 -363Q998 -363 1087 -341T1236 -284L1267 -364Q1210 -402 1108 -427T902 -453Q652 -453 472 -341T203 -17T125 478Q137 756 247 970T550 1302T987
+1420Q1242 1420 1419 1314T1681 1002T1754 513ZM673 286Q684 186 729 132T848 77Q1033 77 1121 332L1166 848Q1099 897 1008 897Q897 897 816 809T696 565T673 286Z" />
+<glyph unicode="A" horiz-adv-x="1279" d="M970 408H309L159 0H30L581 1456H698L1249 0H1121L970 408ZM347 513H931L639 1306L347 513Z" />
+<glyph unicode="B" horiz-adv-x="1255" d="M184 0V1456H614Q848 1456 969 1360T1090 1075Q1090 962 1029 879T860 759Q987 731 1064 634T1142 410Q1142 217 1018 109T671 0H184ZM307 700V104H676Q834 104 926 184T1019 408Q1019 543 931 621T686 700H307ZM307
+803H643Q797 806 881 875T966 1078Q966 1218 879 1284T614 1351H307V803Z" />
+<glyph unicode="C" horiz-adv-x="1330" d="M1215 454Q1190 224 1051 102T679 -20Q517 -20 393 61T200 290T131 630V819Q131 1013 199 1163T394 1394T688 1476Q922 1476 1057 1350T1215 1000H1091Q1045 1371 688 1371Q490 1371 373 1223T255 814V636Q255 384 369
+234T679 84Q872 84 970 176T1091 454H1215Z" />
+<glyph unicode="D" horiz-adv-x="1341" d="M184 0V1456H591Q770 1456 912 1375T1133 1141T1213 795V661Q1213 466 1134 315T912 82T582 0H184ZM307 1351V104H583Q813 104 952 256T1091 669V797Q1091 1048 954 1199T593 1351H307Z" />
+<glyph unicode="E" horiz-adv-x="1165" d="M988 698H307V104H1090V0H184V1456H1085V1351H307V802H988V698Z" />
+<glyph unicode="F" horiz-adv-x="1152" d="M986 680H307V0H184V1456H1086V1351H307V785H986V680Z" />
+<glyph unicode="G" horiz-adv-x="1400" d="M1235 173Q1171 82 1035 31T729 -20Q558 -20 425 62T219 294T145 638V822Q145 1125 298 1300T709 1476Q934 1476 1071 1362T1234 1046H1111Q1084 1206 981 1288T710 1371Q506 1371 387 1226T268 817V645Q268 479 324
+352T486 154T729 84Q888 84 1002 134Q1076 167 1112 211V587H721V691H1235V173Z" />
+<glyph unicode="H" horiz-adv-x="1449" d="M1263 0H1139V698H307V0H184V1456H307V802H1139V1456H1263V0Z" />
+<glyph unicode="I" horiz-adv-x="545" d="M334 0H211V1456H334V0Z" />
+<glyph unicode="J" horiz-adv-x="1127" d="M827 1456H951V433Q951 226 832 103T511 -20Q299 -20 185 91T71 401H194Q194 243 277 164T511 84Q650 84 737 176T827 426V1456Z" />
+<glyph unicode="K" horiz-adv-x="1292" d="M512 723L307 521V0H184V1456H307V671L1053 1456H1208L598 808L1255 0H1105L512 723Z" />
+<glyph unicode="L" horiz-adv-x="1079" d="M308 104H1027V0H184V1456H308V104Z" />
+<glyph unicode="M" horiz-adv-x="1772" d="M347 1456L884 171L1423 1456H1587V0H1464V634L1474 1284L932 0H837L297 1279L307 638V0H184V1456H347Z" />
+<glyph unicode="N" horiz-adv-x="1454" d="M1268 0H1145L308 1246V0H184V1456H308L1146 209V1456H1268V0Z" />
+<glyph unicode="O" horiz-adv-x="1386" d="M1260 649Q1260 448 1191 296T992 62T694 -20Q439 -20 282 162T125 655V805Q125 1004 195 1157T395 1393T692 1476T988 1395T1187 1166T1260 823V649ZM1137 807Q1137 1070 1018 1219T692 1368Q489 1368 369 1219T248
+801V649Q248 390 368 239T694 87Q903 87 1020 236T1137 653V807Z" />
+<glyph unicode="P" horiz-adv-x="1261" d="M307 593V0H184V1456H680Q907 1456 1038 1340T1170 1021Q1170 816 1044 705T677 593H307ZM307 697H680Q859 697 953 782T1047 1019Q1047 1170 954 1259T688 1351H307V697Z" />
+<glyph unicode="Q" horiz-adv-x="1386" d="M1256 649Q1256 441 1183 287T973 53L1238 -178L1153 -254L856 3Q774 -20 689 -20Q523 -20 394 62T193 294T121 642V805Q121 1004 191 1157T391 1393T687 1476Q857 1476 986 1394T1185 1159T1256 806V649ZM1133 807Q1133
+1070 1014 1219T687 1368Q485 1368 365 1219T244 801V649Q244 390 363 239T689 87Q897 87 1015 236T1133 652V807Z" />
+<glyph unicode="R" horiz-adv-x="1300" d="M728 606H305V0H181V1456H654Q887 1456 1018 1343T1149 1027Q1149 887 1067 780T847 632L1211 13V0H1080L728 606ZM305 711H682Q837 711 931 799T1025 1027Q1025 1181 927 1266T652 1351H305V711Z" />
+<glyph unicode="S" horiz-adv-x="1213" d="M1008 358Q1008 479 923 549T612 683T282 822Q134 928 134 1100Q134 1267 271 1371T623 1476Q768 1476 882 1420T1060 1264T1123 1041H999Q999 1190 897 1280T623 1371Q456 1371 357 1297T258 1102Q258 991 347 921T632
+798T929 687T1081 549T1132 360Q1132 188 995 84T632 -20Q478 -20 350 35T155 189T88 416H211Q211 262 326 173T632 84Q802 84 905 159T1008 358Z" />
+<glyph unicode="T" horiz-adv-x="1223" d="M1172 1351H673V0H550V1351H52V1456H1172V1351Z" />
+<glyph unicode="U" horiz-adv-x="1346" d="M1187 1456V462Q1186 315 1122 206T942 39T674 -20Q444 -20 306 105T162 453V1456H284V471Q284 287 389 186T674 84T958 186T1063 470V1456H1187Z" />
+<glyph unicode="V" horiz-adv-x="1263" d="M623 180L631 149L640 180L1098 1456H1233L691 0H573L31 1456H165L623 180Z" />
+<glyph unicode="W" horiz-adv-x="1836" d="M453 393L498 167L553 383L869 1456H980L1292 383L1346 165L1394 393L1657 1456H1783L1410 0H1292L962 1139L925 1283L889 1139L551 0H433L61 1456H187L453 393Z" />
+<glyph unicode="X" horiz-adv-x="1253" d="M627 840L1037 1456H1184L702 738L1199 0H1051L627 636L201 0H55L553 738L70 1456H217L627 840Z" />
+<glyph unicode="Y" horiz-adv-x="1226" d="M611 662L1056 1456H1198L672 548V0H549V548L24 1456H170L611 662Z" />
+<glyph unicode="Z" horiz-adv-x="1225" d="M239 104H1138V0H90V93L954 1351H116V1456H1106V1368L239 104Z" />
+<glyph unicode="[" horiz-adv-x="491" d="M493 1562H283V-210H493V-312H163V1664H493V1562Z" />
+<glyph unicode="\" horiz-adv-x="807" d="M48 1456H165L773 -125H656L48 1456Z" />
+<glyph unicode="]" horiz-adv-x="491" d="M0 1664H331V-312H0V-210H211V1562H0V1664Z" />
+<glyph unicode="^" horiz-adv-x="852" d="M421 1298L193 729H77L376 1456H466L764 729H648L421 1298Z" />
+<glyph unicode="_" horiz-adv-x="884" d="M882 -101H1V0H882V-101Z" />
+<glyph unicode="`" horiz-adv-x="585" d="M438 1245H329L103 1524H247L438 1245Z" />
+<glyph unicode="a" horiz-adv-x="1097" d="M839 0Q821 51 816 151Q753 69 656 25T449 -20Q293 -20 197 67T100 287Q100 445 231 537T598 629H815V752Q815 868 744 934T535 1001Q410 1001 328 937T246 783L126 784Q126 913 246 1007T541 1102Q722 1102 826 1012T934
+759V247Q934 90 967 12V0H839ZM463 86Q583 86 677 144T815 299V537H601Q422 535 321 472T220 297Q220 206 287 146T463 86Z" />
+<glyph unicode="b" horiz-adv-x="1135" d="M1027 530Q1027 277 915 129T614 -20Q388 -20 272 148L267 0H155V1536H274V925Q388 1102 612 1102Q804 1102 915 956T1027 548V530ZM907 551Q907 765 824 881T590 998Q475 998 395 942T274 776V288Q364 84 592 84Q740
+84 823 201T907 551Z" />
+<glyph unicode="c" horiz-adv-x="1055" d="M556 81Q681 81 765 151T857 334H972Q967 235 910 154T759 26T556 -20Q343 -20 219 128T94 526V562Q94 722 150 845T310 1035T555 1102Q733 1102 848 996T972 717H857Q849 844 766 922T555 1000Q393 1000 304 883T214
+555V520Q214 313 303 197T556 81Z" />
+<glyph unicode="d" horiz-adv-x="1138" d="M108 551Q108 803 220 952T526 1102Q745 1102 860 929V1536H979V0H867L862 144Q747 -20 524 -20Q337 -20 223 130T108 537V551ZM229 530Q229 323 312 204T546 84Q767 84 860 279V787Q767 998 548 998Q397 998 313 880T229 530Z" />
+<glyph unicode="e" horiz-adv-x="1058" d="M575 -20Q437 -20 326 48T152 237T90 510V553Q90 709 150 834T319 1030T553 1102Q750 1102 865 968T981 600V533H209V510Q209 326 314 204T580 81Q676 81 749 116T883 228L958 171Q826 -20 575 -20ZM553 1000Q418 1000
+326 901T213 635H862V648Q857 804 773 902T553 1000Z" />
+<glyph unicode="f" horiz-adv-x="678" d="M242 0V984H63V1082H242V1213Q242 1379 326 1468T562 1557Q630 1557 689 1540L680 1440Q630 1452 571 1452Q472 1452 417 1391T362 1216V1082H620V984H362V0H242Z" />
+<glyph unicode="g" horiz-adv-x="1136" d="M108 551Q108 805 220 953T526 1102Q747 1102 862 926L868 1082H980V22Q980 -187 863 -309T546 -431Q433 -431 331 -381T169 -246L236 -174Q363 -330 538 -330Q688 -330 772 -242T859 4V140Q744 -20 524 -20Q336 -20
+222 130T108 535V551ZM229 530Q229 323 312 204T546 84Q767 84 859 282V785Q817 889 738 943T548 998Q397 998 313 880T229 530Z" />
+<glyph unicode="h" horiz-adv-x="1124" d="M275 899Q334 996 426 1049T627 1102Q801 1102 886 1004T972 710V0H853V711Q852 856 792 927T598 998Q487 998 402 929T275 741V0H156V1536H275V899Z" />
+<glyph unicode="i" horiz-adv-x="459" d="M290 0H170V1082H290V0ZM149 1395Q149 1429 171 1452T231 1476T291 1453T314 1395T292 1338T231 1315T171 1338T149 1395Z" />
+<glyph unicode="j" horiz-adv-x="467" d="M285 1082V-129Q285 -279 213 -358T1 -437Q-53 -437 -104 -418L-102 -319Q-58 -332 -12 -332Q166 -332 166 -127V1082H285ZM226 1476Q265 1476 287 1453T309 1395T287 1338T226 1315Q188 1315 167 1338T145 1395T166 1452T226
+1476Z" />
+<glyph unicode="k" horiz-adv-x="1003" d="M413 545L276 413V0H156V1536H276V553L389 675L803 1082H954L495 626L994 0H851L413 545Z" />
+<glyph unicode="l" horiz-adv-x="459" d="M290 0H170V1536H290V0Z" />
+<glyph unicode="m" horiz-adv-x="1815" d="M265 1082L269 906Q329 1004 419 1053T619 1102Q875 1102 944 892Q1002 993 1099 1047T1313 1102Q1661 1102 1668 722V0H1548V713Q1547 858 1486 928T1285 998Q1156 996 1067 915T968 716V0H848V722Q847 861 783 929T584
+998Q471 998 390 934T270 742V0H150V1082H265Z" />
+<glyph unicode="n" horiz-adv-x="1125" d="M270 1082L274 897Q335 997 426 1049T627 1102Q801 1102 886 1004T972 710V0H853V711Q852 856 792 927T598 998Q487 998 402 929T275 741V0H156V1082H270Z" />
+<glyph unicode="o" horiz-adv-x="1147" d="M90 557Q90 713 150 838T321 1032T572 1102Q788 1102 922 951T1056 549V524Q1056 367 996 242T825 48T574 -20Q359 -20 225 131T90 533V557ZM210 524Q210 330 310 206T574 81Q736 81 836 205T937 534V557Q937 681 891
+784T762 943T572 1000Q412 1000 311 875T210 546V524Z" />
+<glyph unicode="p" horiz-adv-x="1135" d="M1026 530Q1026 277 914 129T614 -20Q392 -20 274 136V-416H155V1082H266L272 929Q389 1102 611 1102Q805 1102 915 955T1026 547V530ZM906 551Q906 758 821 878T584 998Q474 998 395 945T274 791V272Q317 179 397 130T586
+81Q737 81 821 201T906 551Z" />
+<glyph unicode="q" horiz-adv-x="1142" d="M108 551Q108 805 220 953T528 1102Q747 1102 861 935L867 1082H979V-416H859V134Q741 -20 526 -20Q336 -20 222 130T108 535V551ZM229 530Q229 320 313 201T548 81Q763 81 859 268V798Q814 895 735 947T550 1000Q399
+1000 314 881T229 530Z" />
+<glyph unicode="r" horiz-adv-x="689" d="M656 980Q618 987 575 987Q463 987 386 925T275 743V0H156V1082H273L275 910Q370 1102 580 1102Q630 1102 659 1089L656 980Z" />
+<glyph unicode="s" horiz-adv-x="1037" d="M804 275Q804 364 733 418T517 502T294 572T176 669T137 807Q137 935 244 1018T518 1102Q699 1102 808 1013T918 779H798Q798 874 719 937T518 1000Q400 1000 329 948T257 811Q257 730 316 686T533 604T769 525T886 424T924
+281Q924 144 814 62T525 -20Q336 -20 219 71T101 303H221Q228 198 309 140T525 81Q650 81 727 136T804 275Z" />
+<glyph unicode="t" horiz-adv-x="658" d="M342 1359V1082H566V984H342V263Q342 173 374 129T483 85Q513 85 580 95L585 -3Q538 -20 457 -20Q334 -20 278 51T222 262V984H23V1082H222V1359H342Z" />
+<glyph unicode="u" horiz-adv-x="1125" d="M852 137Q744 -20 507 -20Q334 -20 244 80T152 378V1082H271V393Q271 84 521 84Q781 84 850 299V1082H970V0H854L852 137Z" />
+<glyph unicode="v" horiz-adv-x="985" d="M493 165L822 1082H945L541 0H444L38 1082H161L493 165Z" />
+<glyph unicode="w" horiz-adv-x="1544" d="M415 249L433 156L457 254L717 1082H819L1076 261L1104 147L1127 252L1349 1082H1473L1158 0H1056L778 858L765 917L752 857L479 0H377L63 1082H186L415 249Z" />
+<glyph unicode="x" horiz-adv-x="996" d="M496 643L788 1082H930L563 551L946 0H805L497 458L189 0H48L430 551L63 1082H204L496 643Z" />
+<glyph unicode="y" horiz-adv-x="973" d="M499 172L815 1082H944L482 -184L458 -240Q369 -437 183 -437Q140 -437 91 -423L90 -324L152 -330Q240 -330 294 -287T387 -137L440 9L32 1082H163L499 172Z" />
+<glyph unicode="z" horiz-adv-x="996" d="M235 101H938V0H87V88L743 979H107V1082H894V993L235 101Z" />
+<glyph unicode="{" horiz-adv-x="676" d="M637 -404Q469 -354 384 -241T299 59V280Q299 543 68 543V647Q299 647 299 908V1137Q300 1320 384 1433T637 1597L663 1518Q419 1440 419 1127V914Q419 668 235 595Q419 518 419 277V49Q423 -243 666 -324L637 -404Z" />
+<glyph unicode="|" horiz-adv-x="452" d="M279 -270H178V1456H279V-270Z" />
+<glyph unicode="}" horiz-adv-x="676" d="M9 -324Q252 -243 256 49V273Q256 526 449 594Q256 662 256 913V1126Q256 1442 12 1518L38 1597Q209 1546 292 1432T376 1131V908Q376 647 607 647V543Q376 543 376 280V59Q376 -128 291 -241T38 -404L9 -324Z" />
+<glyph unicode="~" horiz-adv-x="1402" d="M1254 764Q1254 615 1171 519T958 423Q886 423 824 450T670 558T535 659T441 680Q352 680 303 621T253 450L145 449Q145 598 226 692T441 787Q515 787 581 756T740 643Q807 580 855 555T958 529Q1046 529 1098 592T1150
+764H1254Z" />
+<glyph unicode="&#xa0;" horiz-adv-x="498" />
+<glyph unicode="&#xa1;" horiz-adv-x="452" d="M174 690H285L292 -359H168L174 690ZM305 1022Q305 988 283 965T223 942T163 965T140 1022T162 1079T223 1102T283 1079T305 1022Z" />
+<glyph unicode="&#xa2;" horiz-adv-x="1115" d="M581 81Q704 81 788 150T882 334H997Q989 195 887 97T636 -17V-245H516V-16Q331 7 225 150T119 526V562Q119 784 224 929T516 1098V1318H636V1099Q791 1083 891 978T997 717H882Q874 844 791 922T580 1000Q418 1000
+329 883T239 555V520Q239 313 328 197T581 81Z" />
+<glyph unicode="&#xa3;" horiz-adv-x="1170" d="M404 645L413 368Q415 194 349 104H1094V0H97V104H195Q246 117 272 211Q292 285 290 367L281 645H93V749H277L268 1039Q268 1239 378 1357T674 1476Q856 1476 961 1371T1067 1088H944Q944 1223 869 1297T665 1371Q540
+1371 466 1283T392 1039L401 749H745V645H404Z" />
+<glyph unicode="&#xa4;" horiz-adv-x="1481" d="M1131 133Q1053 61 953 21T740 -20Q514 -20 349 132L194 -26L109 60L268 221Q144 389 144 608Q144 835 277 1006L109 1177L194 1264L361 1094Q526 1234 740 1234T1119 1092L1289 1265L1375 1177L1204 1002Q1334
+832 1334 608Q1334 393 1212 224L1375 60L1289 -27L1131 133ZM257 608Q257 470 321 350T499 161T740 91Q869 91 981 161T1157 350T1221 608Q1221 747 1156 866T979 1054T740 1122T500 1054T323 867T257 608Z" />
+<glyph unicode="&#xa5;" horiz-adv-x="1223" d="M607 734L1028 1456H1171L718 705H1085V616H667V412H1085V324H667V0H544V324H130V412H544V616H130V705H496L44 1456H187L607 734Z" />
+<glyph unicode="&#xa6;" horiz-adv-x="444" d="M159 -270V501H279V-270H159ZM279 698H159V1456H279V698Z" />
+<glyph unicode="&#xa7;" horiz-adv-x="1239" d="M1119 431Q1119 331 1058 262T887 159Q978 111 1026 41T1075 -139Q1075 -303 949 -399T606 -495Q497 -495 401 -467T236 -382Q102 -268 102 -64L222 -62Q222 -218 325 -305T606 -393Q766 -393 860 -324T954 -141Q954
+-64 920 -17T805 69T548 156T284 255T153 378T108 551Q108 651 166 721T331 825Q245 872 199 942T153 1120Q153 1281 282 1378T624 1476Q848 1476 972 1363T1097 1045H977Q977 1191 881 1282T624 1374Q459 1374 366 1306T273 1122Q273 1043 304 996T411 911T646
+828Q842 777 936 726T1075 603T1119 431ZM454 771Q346 758 287 700T228 553Q228 470 263 422T379 336T663 242L755 214Q867 227 933 284T999 428Q999 526 932 585T692 700L454 771Z" />
+<glyph unicode="&#xa8;" horiz-adv-x="881" d="M143 1396Q143 1430 164 1453T225 1477Q263 1477 285 1454T308 1396Q308 1363 286 1340T225 1317Q186 1317 165 1340T143 1396ZM580 1395Q580 1429 602 1452T662 1476Q701 1476 723 1453T745 1395Q745 1362 723 1339T662
+1316Q624 1316 602 1339T580 1395Z" />
+<glyph unicode="&#xa9;" horiz-adv-x="1637" d="M1121 607Q1121 455 1039 374T807 293T566 399T474 686V776Q474 950 566 1056T807 1163T1039 1083T1122 850H1023Q1023 1074 807 1074Q701 1074 637 993T573 771V680Q573 546 636 465T807 383Q913 383 967 436T1022
+607H1121ZM192 729Q192 553 273 399T502 155T817 65Q984 65 1129 154T1357 396T1441 729Q1441 907 1358 1059T1130 1300T817 1389Q646 1389 499 1298T272 1055T192 729ZM107 729Q107 931 200 1104T459 1376T817 1476T1174 1377T1432 1104T1526 729Q1526 532 1436
+360T1181 84T817 -21Q620 -21 455 82T198 358T107 729Z" />
+<glyph unicode="&#xaa;" horiz-adv-x="906" d="M649 705Q634 748 628 799Q541 691 406 691Q289 691 223 749T157 908Q157 1018 240 1079T486 1140H625V1201Q625 1286 585 1333T464 1380Q374 1380 323 1345T271 1237L164 1243Q164 1345 247 1410T464 1476Q588 1476
+661 1405T734 1199V884Q734 792 760 705H649ZM426 786Q479 786 536 816T625 890V1058H496Q266 1058 266 912Q266 786 426 786Z" />
+<glyph unicode="&#xab;" horiz-adv-x="933" d="M247 792L523 404H418L123 783V802L418 1181H523L247 792ZM556 536L832 148H727L432 527V546L727 925H832L556 536Z" />
+<glyph unicode="&#xac;" horiz-adv-x="1117" d="M936 386H816V670H124V776H936V386Z" />
+<glyph unicode="&#xad;" horiz-adv-x="586" d="M528 592H49V693H528V592Z" />
+<glyph unicode="&#xae;" horiz-adv-x="1642" d="M102 729Q102 931 195 1104T454 1376T812 1476T1169 1377T1428 1104T1522 729Q1522 530 1431 358T1175 83T812 -21T450 82T193 358T102 729ZM187 729Q187 550 270 396T499 154T812 65T1125 153T1353 396T1436 729Q1436
+905 1355 1057T1129 1299T812 1389Q644 1389 499 1301T270 1060T187 729ZM650 666V321H552V1160H810Q957 1160 1036 1099T1115 912Q1115 779 974 715Q1046 689 1074 635T1102 504T1106 394T1119 337V321H1017Q1003 357 1003 503Q1003 592 966 629T838 666H650ZM650
+757H831Q912 757 964 799T1017 910Q1017 995 974 1031T824 1070H650V757Z" />
+<glyph unicode="&#xaf;" horiz-adv-x="874" d="M776 1359H106V1456H776V1359Z" />
+<glyph unicode="&#xb0;" horiz-adv-x="774" d="M630 1226Q630 1122 559 1051T388 980Q287 980 215 1051T143 1226T216 1402T388 1476T558 1403T630 1226ZM233 1226Q233 1159 277 1115T388 1071T497 1115T540 1226Q540 1295 497 1340T388 1385Q323 1385 278 1340T233
+1226Z" />
+<glyph unicode="&#xb1;" horiz-adv-x="1085" d="M609 829H1000V727H609V289H498V727H84V829H498V1267H609V829ZM963 0H128V101H963V0Z" />
+<glyph unicode="&#xb2;" horiz-adv-x="740" d="M667 665H96V740L416 1054Q522 1164 522 1237Q522 1300 482 1338T362 1377Q275 1377 228 1333T181 1215H76Q76 1323 155 1394T360 1465T557 1403T628 1239Q628 1138 510 1016L455 961L229 752H667V665Z" />
+<glyph unicode="&#xb3;" horiz-adv-x="740" d="M267 1107H353Q434 1109 481 1145T529 1241Q529 1303 486 1340T362 1377Q286 1377 238 1340T190 1245H85Q85 1341 163 1403T361 1465Q489 1465 562 1405T635 1243Q635 1187 597 1140T489 1069Q651 1027 651 880Q651
+778 572 716T363 654Q234 654 153 717T71 884H177Q177 822 229 782T366 741Q453 741 499 779T546 883Q546 1025 340 1025H267V1107Z" />
+<glyph unicode="&#xb4;" horiz-adv-x="576" d="M315 1524H460L229 1245H124L315 1524Z" />
+<glyph unicode="&#xb5;" horiz-adv-x="1140" d="M281 1082V446Q281 266 344 174T544 81Q676 81 753 138T859 312V1082H979V0H870L863 154Q765 -20 552 -20Q368 -20 281 105V-416H162V1082H281Z" />
+<glyph unicode="&#xb6;" horiz-adv-x="973" d="M681 0V520H573Q423 520 312 578T142 742T83 988Q83 1201 216 1328T577 1456H801V0H681Z" />
+<glyph unicode="&#xb7;" horiz-adv-x="503" d="M163 717Q163 752 185 776T247 800T310 776T333 717T310 659T247 635T185 658T163 717Z" />
+<glyph unicode="&#xb8;" horiz-adv-x="498" d="M246 0L234 -64Q399 -85 399 -235Q399 -327 320 -381T105 -435L98 -357Q187 -357 243 -325T300 -237Q300 -179 257 -157T124 -127L153 0H246Z" />
+<glyph unicode="&#xb9;" horiz-adv-x="740" d="M464 669H358V1332L126 1262V1352L450 1459H464V669Z" />
+<glyph unicode="&#xba;" horiz-adv-x="922" d="M135 1132Q135 1285 223 1380T458 1476Q605 1476 693 1381T782 1127V1033Q782 880 694 785T460 690Q313 690 224 784T135 1038V1132ZM243 1033Q243 919 299 852T460 785Q559 785 616 851T674 1037V1132Q674 1247
+616 1313T458 1380T301 1312T243 1127V1033Z" />
+<glyph unicode="&#xbb;" horiz-adv-x="928" d="M221 944L516 560V541L221 162H115L391 550L115 944H221ZM540 944L835 560V541L540 162H434L710 550L434 944H540Z" />
+<glyph unicode="&#xbc;" horiz-adv-x="1484" d="M453 664H347V1327L115 1257V1347L439 1454H453V664ZM414 129L340 177L1051 1315L1125 1267L414 129ZM1272 275H1399V187H1272V0H1167V187H768L764 253L1161 789H1272V275ZM878 275H1167V659L1136 609L878 275Z" />
+<glyph unicode="&#xbd;" horiz-adv-x="1548" d="M370 129L296 177L1007 1315L1081 1267L370 129ZM438 664H332V1327L100 1257V1347L424 1454H438V664ZM1436 0H865V75L1185 389Q1291 499 1291 572Q1291 635 1251 673T1131 712Q1044 712 997 668T950 550H845Q845
+658 924 729T1129 800T1326 738T1397 574Q1397 473 1279 351L1224 296L998 87H1436V0Z" />
+<glyph unicode="&#xbe;" horiz-adv-x="1590" d="M558 129L484 177L1195 1315L1269 1267L558 129ZM1387 275H1514V187H1387V0H1282V187H883L879 253L1276 789H1387V275ZM993 275H1282V659L1251 609L993 275ZM314 1107H400Q481 1109 528 1145T576 1241Q576 1303
+533 1340T409 1377Q333 1377 285 1340T237 1245H132Q132 1341 210 1403T408 1465Q536 1465 609 1405T682 1243Q682 1187 644 1140T536 1069Q698 1027 698 880Q698 778 619 716T410 654Q281 654 200 717T118 884H224Q224 822 276 782T413 741Q500 741 546 779T593
+883Q593 1025 387 1025H314V1107Z" />
+<glyph unicode="&#xbf;" horiz-adv-x="940" d="M551 687Q549 564 524 505T405 352T288 228Q207 123 207 -8Q207 -137 274 -207T469 -277Q588 -277 659 -207T732 -20H852Q850 -186 745 -284T469 -383Q291 -383 190 -283T88 -10Q88 101 141 202T337 438Q422 509
+429 618L431 687H551ZM567 1022Q567 988 545 965T485 941T425 964T402 1022Q402 1055 424 1078T485 1101T545 1078T567 1022Z" />
+<glyph unicode="&#xc0;" horiz-adv-x="1279" d="M970 408H309L159 0H30L581 1456H698L1249 0H1121L970 408ZM347 513H931L639 1306L347 513ZM716 1560H607L381 1839H525L716 1560Z" />
+<glyph unicode="&#xc1;" horiz-adv-x="1279" d="M970 408H309L159 0H30L581 1456H698L1249 0H1121L970 408ZM347 513H931L639 1306L347 513ZM762 1839H907L676 1560H571L762 1839Z" />
+<glyph unicode="&#xc2;" horiz-adv-x="1279" d="M970 408H309L159 0H30L581 1456H698L1249 0H1121L970 408ZM347 513H931L639 1306L347 513ZM921 1583V1573H810L642 1756L475 1573H366V1586L604 1841H680L921 1583Z" />
+<glyph unicode="&#xc3;" horiz-adv-x="1279" d="M970 408H309L159 0H30L581 1456H698L1249 0H1121L970 408ZM347 513H931L639 1306L347 513ZM983 1809Q983 1713 927 1655T788 1596Q712 1596 640 1651T510 1706Q463 1706 432 1675T400 1588L310 1591Q310 1683 364
+1743T505 1803Q553 1803 587 1786T651 1748T711 1710T783 1693Q829 1693 861 1726T894 1815L983 1809Z" />
+<glyph unicode="&#xc4;" horiz-adv-x="1279" d="M970 408H309L159 0H30L581 1456H698L1249 0H1121L970 408ZM347 513H931L639 1306L347 513ZM349 1711Q349 1745 370 1768T431 1792Q469 1792 491 1769T514 1711Q514 1678 492 1655T431 1632Q392 1632 371 1655T349
+1711ZM786 1710Q786 1744 808 1767T868 1791Q907 1791 929 1768T951 1710Q951 1677 929 1654T868 1631Q830 1631 808 1654T786 1710Z" />
+<glyph unicode="&#xc5;" horiz-adv-x="1279" d="M970 408H309L159 0H30L581 1456H698L1249 0H1121L970 408ZM347 513H931L639 1306L347 513ZM450 1715Q450 1795 506 1850T643 1905Q722 1905 779 1850T836 1715Q836 1636 781 1582T643 1528T505 1582T450 1715ZM527
+1715Q527 1665 560 1632T643 1599Q692 1599 726 1631T760 1715Q760 1768 725 1801T643 1834Q594 1834 561 1800T527 1715Z" />
+<glyph unicode="&#xc6;" horiz-adv-x="1865" d="M1823 0H1006L989 389H393L163 0H17L898 1456H1762V1354H1068L1091 809H1680V707H1095L1121 101H1823V0ZM460 502H985L950 1331L460 502Z" />
+<glyph unicode="&#xc7;" horiz-adv-x="1330" d="M1215 454Q1190 224 1051 102T679 -20Q517 -20 393 61T200 290T131 630V819Q131 1013 199 1163T394 1394T688 1476Q922 1476 1057 1350T1215 1000H1091Q1045 1371 688 1371Q490 1371 373 1223T255 814V636Q255 384
+369 234T679 84Q872 84 970 176T1091 454H1215ZM728 -9L716 -73Q881 -94 881 -244Q881 -336 802 -390T587 -444L580 -366Q669 -366 725 -334T782 -246Q782 -188 739 -166T606 -136L635 -9H728Z" />
+<glyph unicode="&#xc8;" horiz-adv-x="1165" d="M988 698H307V104H1090V0H184V1456H1085V1351H307V802H988V698ZM693 1566H584L358 1845H502L693 1566Z" />
+<glyph unicode="&#xc9;" horiz-adv-x="1165" d="M988 698H307V104H1090V0H184V1456H1085V1351H307V802H988V698ZM739 1845H884L653 1566H548L739 1845Z" />
+<glyph unicode="&#xca;" horiz-adv-x="1165" d="M988 698H307V104H1090V0H184V1456H1085V1351H307V802H988V698ZM898 1589V1579H787L619 1762L452 1579H343V1592L581 1847H657L898 1589Z" />
+<glyph unicode="&#xcb;" horiz-adv-x="1165" d="M988 698H307V104H1090V0H184V1456H1085V1351H307V802H988V698ZM326 1717Q326 1751 347 1774T408 1798Q446 1798 468 1775T491 1717Q491 1684 469 1661T408 1638Q369 1638 348 1661T326 1717ZM763 1716Q763 1750
+785 1773T845 1797Q884 1797 906 1774T928 1716Q928 1683 906 1660T845 1637Q807 1637 785 1660T763 1716Z" />
+<glyph unicode="&#xcc;" horiz-adv-x="545" d="M334 0H211V1456H334V0ZM348 1566H239L13 1845H157L348 1566Z" />
+<glyph unicode="&#xcd;" horiz-adv-x="545" d="M334 0H211V1456H334V0ZM393 1845H538L307 1566H202L393 1845Z" />
+<glyph unicode="&#xce;" horiz-adv-x="545" d="M334 0H211V1456H334V0ZM553 1589V1579H442L274 1762L107 1579H-2V1592L236 1847H312L553 1589Z" />
+<glyph unicode="&#xcf;" horiz-adv-x="545" d="M334 0H211V1456H334V0ZM-19 1717Q-19 1751 2 1774T63 1798Q101 1798 123 1775T146 1717Q146 1684 124 1661T63 1638Q24 1638 3 1661T-19 1717ZM418 1716Q418 1750 440 1773T500 1797Q539 1797 561 1774T583 1716Q583
+1683 561 1660T500 1637Q462 1637 440 1660T418 1716Z" />
+<glyph unicode="&#xd0;" horiz-adv-x="1371" d="M214 0V689H33V791H214V1456H621Q800 1456 942 1375T1163 1141T1243 795V661Q1243 466 1164 315T942 82T612 0H214ZM645 689H337V104H608Q843 104 982 256T1121 669V797Q1121 1048 984 1199T623 1351H337V791H645V689Z" />
+<glyph unicode="&#xd1;" horiz-adv-x="1454" d="M1268 0H1145L308 1246V0H184V1456H308L1146 209V1456H1268V0ZM1067 1809Q1067 1713 1011 1655T872 1596Q796 1596 724 1651T594 1706Q547 1706 516 1675T484 1588L394 1591Q394 1683 448 1743T589 1803Q637 1803
+671 1786T735 1748T795 1710T867 1693Q913 1693 945 1726T978 1815L1067 1809Z" />
+<glyph unicode="&#xd2;" horiz-adv-x="1386" d="M1260 649Q1260 448 1191 296T992 62T694 -20Q439 -20 282 162T125 655V805Q125 1004 195 1157T395 1393T692 1476T988 1395T1187 1166T1260 823V649ZM1137 807Q1137 1070 1018 1219T692 1368Q489 1368 369 1219T248
+801V649Q248 390 368 239T694 87Q903 87 1020 236T1137 653V807ZM765 1572H656L430 1851H574L765 1572Z" />
+<glyph unicode="&#xd3;" horiz-adv-x="1386" d="M1260 649Q1260 448 1191 296T992 62T694 -20Q439 -20 282 162T125 655V805Q125 1004 195 1157T395 1393T692 1476T988 1395T1187 1166T1260 823V649ZM1137 807Q1137 1070 1018 1219T692 1368Q489 1368 369 1219T248
+801V649Q248 390 368 239T694 87Q903 87 1020 236T1137 653V807ZM811 1851H956L725 1572H620L811 1851Z" />
+<glyph unicode="&#xd4;" horiz-adv-x="1386" d="M1260 649Q1260 448 1191 296T992 62T694 -20Q439 -20 282 162T125 655V805Q125 1004 195 1157T395 1393T692 1476T988 1395T1187 1166T1260 823V649ZM1137 807Q1137 1070 1018 1219T692 1368Q489 1368 369 1219T248
+801V649Q248 390 368 239T694 87Q903 87 1020 236T1137 653V807ZM970 1595V1585H859L691 1768L524 1585H415V1598L653 1853H729L970 1595Z" />
+<glyph unicode="&#xd5;" horiz-adv-x="1386" d="M1260 649Q1260 448 1191 296T992 62T694 -20Q439 -20 282 162T125 655V805Q125 1004 195 1157T395 1393T692 1476T988 1395T1187 1166T1260 823V649ZM1137 807Q1137 1070 1018 1219T692 1368Q489 1368 369 1219T248
+801V649Q248 390 368 239T694 87Q903 87 1020 236T1137 653V807ZM1032 1821Q1032 1725 976 1667T837 1608Q761 1608 689 1663T559 1718Q512 1718 481 1687T449 1600L359 1603Q359 1695 413 1755T554 1815Q602 1815 636 1798T700 1760T760 1722T832 1705Q878 1705
+910 1738T943 1827L1032 1821Z" />
+<glyph unicode="&#xd6;" horiz-adv-x="1386" d="M1260 649Q1260 448 1191 296T992 62T694 -20Q439 -20 282 162T125 655V805Q125 1004 195 1157T395 1393T692 1476T988 1395T1187 1166T1260 823V649ZM1137 807Q1137 1070 1018 1219T692 1368Q489 1368 369 1219T248
+801V649Q248 390 368 239T694 87Q903 87 1020 236T1137 653V807ZM398 1723Q398 1757 419 1780T480 1804Q518 1804 540 1781T563 1723Q563 1690 541 1667T480 1644Q441 1644 420 1667T398 1723ZM835 1722Q835 1756 857 1779T917 1803Q956 1803 978 1780T1000 1722Q1000
+1689 978 1666T917 1643Q879 1643 857 1666T835 1722Z" />
+<glyph unicode="&#xd7;" horiz-adv-x="1072" d="M93 179L451 544L108 894L187 974L529 624L872 974L951 894L608 544L966 179L887 100L529 464L172 100L93 179Z" />
+<glyph unicode="&#xd8;" horiz-adv-x="1386" d="M1260 649Q1260 448 1191 296T992 62T694 -20Q508 -20 375 77L274 -83H170L307 134Q125 318 125 658V805Q125 1004 195 1157T395 1393T692 1476Q916 1476 1064 1336L1171 1505H1274L1125 1268Q1259 1088 1260 807V649ZM248
+649Q248 388 370 235L1002 1237Q883 1368 692 1368Q489 1368 369 1219T248 801V649ZM1137 807Q1137 1018 1057 1160L434 171Q541 87 694 87Q903 87 1020 236T1137 653V807Z" />
+<glyph unicode="&#xd9;" horiz-adv-x="1346" d="M1187 1456V462Q1186 315 1122 206T942 39T674 -20Q444 -20 306 105T162 453V1456H284V471Q284 287 389 186T674 84T958 186T1063 470V1456H1187ZM756 1560H647L421 1839H565L756 1560Z" />
+<glyph unicode="&#xda;" horiz-adv-x="1346" d="M1187 1456V462Q1186 315 1122 206T942 39T674 -20Q444 -20 306 105T162 453V1456H284V471Q284 287 389 186T674 84T958 186T1063 470V1456H1187ZM802 1839H947L716 1560H611L802 1839Z" />
+<glyph unicode="&#xdb;" horiz-adv-x="1346" d="M1187 1456V462Q1186 315 1122 206T942 39T674 -20Q444 -20 306 105T162 453V1456H284V471Q284 287 389 186T674 84T958 186T1063 470V1456H1187ZM961 1583V1573H850L682 1756L515 1573H406V1586L644 1841H720L961 1583Z" />
+<glyph unicode="&#xdc;" horiz-adv-x="1346" d="M1187 1456V462Q1186 315 1122 206T942 39T674 -20Q444 -20 306 105T162 453V1456H284V471Q284 287 389 186T674 84T958 186T1063 470V1456H1187ZM389 1711Q389 1745 410 1768T471 1792Q509 1792 531 1769T554 1711Q554
+1678 532 1655T471 1632Q432 1632 411 1655T389 1711ZM826 1710Q826 1744 848 1767T908 1791Q947 1791 969 1768T991 1710Q991 1677 969 1654T908 1631Q870 1631 848 1654T826 1710Z" />
+<glyph unicode="&#xdd;" horiz-adv-x="1226" d="M611 662L1056 1456H1198L672 548V0H549V548L24 1456H170L611 662ZM732 1833H877L646 1554H541L732 1833Z" />
+<glyph unicode="&#xde;" horiz-adv-x="1214" d="M303 1456V1152H628Q771 1152 877 1101T1039 956T1096 738Q1096 553 974 441T641 324H303V0H183V1456H303ZM303 1051V425H627Q784 425 880 510T976 736T885 961T642 1051H303Z" />
+<glyph unicode="&#xdf;" horiz-adv-x="1200" d="M271 0H151V1127Q151 1327 246 1435T512 1544Q665 1544 760 1460T856 1237Q856 1179 843 1131T794 1019T746 913T733 824Q733 768 774 716T911 593T1051 454T1096 306Q1096 160 990 70T720 -20Q636 -20 545 4T414
+60L448 161Q485 132 562 106T706 80Q828 80 902 144T976 306Q976 367 932 423T797 547T659 681T613 826Q613 922 676 1034T739 1230Q739 1323 676 1382T522 1442Q275 1442 271 1136V0Z" />
+<glyph unicode="&#xe0;" horiz-adv-x="1097" d="M839 0Q821 51 816 151Q753 69 656 25T449 -20Q293 -20 197 67T100 287Q100 445 231 537T598 629H815V752Q815 868 744 934T535 1001Q410 1001 328 937T246 783L126 784Q126 913 246 1007T541 1102Q722 1102 826
+1012T934 759V247Q934 90 967 12V0H839ZM463 86Q583 86 677 144T815 299V537H601Q422 535 321 472T220 297Q220 206 287 146T463 86ZM653 1245H544L318 1524H462L653 1245Z" />
+<glyph unicode="&#xe1;" horiz-adv-x="1097" d="M839 0Q821 51 816 151Q753 69 656 25T449 -20Q293 -20 197 67T100 287Q100 445 231 537T598 629H815V752Q815 868 744 934T535 1001Q410 1001 328 937T246 783L126 784Q126 913 246 1007T541 1102Q722 1102 826
+1012T934 759V247Q934 90 967 12V0H839ZM463 86Q583 86 677 144T815 299V537H601Q422 535 321 472T220 297Q220 206 287 146T463 86ZM699 1524H844L613 1245H508L699 1524Z" />
+<glyph unicode="&#xe2;" horiz-adv-x="1097" d="M839 0Q821 51 816 151Q753 69 656 25T449 -20Q293 -20 197 67T100 287Q100 445 231 537T598 629H815V752Q815 868 744 934T535 1001Q410 1001 328 937T246 783L126 784Q126 913 246 1007T541 1102Q722 1102 826
+1012T934 759V247Q934 90 967 12V0H839ZM463 86Q583 86 677 144T815 299V537H601Q422 535 321 472T220 297Q220 206 287 146T463 86ZM858 1268V1258H747L579 1441L412 1258H303V1271L541 1526H617L858 1268Z" />
+<glyph unicode="&#xe3;" horiz-adv-x="1097" d="M839 0Q821 51 816 151Q753 69 656 25T449 -20Q293 -20 197 67T100 287Q100 445 231 537T598 629H815V752Q815 868 744 934T535 1001Q410 1001 328 937T246 783L126 784Q126 913 246 1007T541 1102Q722 1102 826
+1012T934 759V247Q934 90 967 12V0H839ZM463 86Q583 86 677 144T815 299V537H601Q422 535 321 472T220 297Q220 206 287 146T463 86ZM920 1494Q920 1398 864 1340T725 1281Q649 1281 577 1336T447 1391Q400 1391 369 1360T337 1273L247 1276Q247 1368 301 1428T442
+1488Q490 1488 524 1471T588 1433T648 1395T720 1378Q766 1378 798 1411T831 1500L920 1494Z" />
+<glyph unicode="&#xe4;" horiz-adv-x="1097" d="M839 0Q821 51 816 151Q753 69 656 25T449 -20Q293 -20 197 67T100 287Q100 445 231 537T598 629H815V752Q815 868 744 934T535 1001Q410 1001 328 937T246 783L126 784Q126 913 246 1007T541 1102Q722 1102 826
+1012T934 759V247Q934 90 967 12V0H839ZM463 86Q583 86 677 144T815 299V537H601Q422 535 321 472T220 297Q220 206 287 146T463 86ZM286 1396Q286 1430 307 1453T368 1477Q406 1477 428 1454T451 1396Q451 1363 429 1340T368 1317Q329 1317 308 1340T286 1396ZM723
+1395Q723 1429 745 1452T805 1476Q844 1476 866 1453T888 1395Q888 1362 866 1339T805 1316Q767 1316 745 1339T723 1395Z" />
+<glyph unicode="&#xe5;" horiz-adv-x="1097" d="M839 0Q821 51 816 151Q753 69 656 25T449 -20Q293 -20 197 67T100 287Q100 445 231 537T598 629H815V752Q815 868 744 934T535 1001Q410 1001 328 937T246 783L126 784Q126 913 246 1007T541 1102Q722 1102 826
+1012T934 759V247Q934 90 967 12V0H839ZM463 86Q583 86 677 144T815 299V537H601Q422 535 321 472T220 297Q220 206 287 146T463 86ZM387 1400Q387 1480 443 1535T580 1590Q659 1590 716 1535T773 1400Q773 1321 718 1267T580 1213T442 1267T387 1400ZM464 1400Q464
+1350 497 1317T580 1284Q629 1284 663 1316T697 1400Q697 1453 662 1486T580 1519Q531 1519 498 1485T464 1400Z" />
+<glyph unicode="&#xe6;" horiz-adv-x="1732" d="M1265 -20Q1126 -20 1027 34T867 186Q807 88 693 34T440 -20Q271 -20 178 64T85 293Q85 450 195 539T511 632H781V720Q781 852 718 926T528 1000Q398 1000 315 935T232 765L113 778Q113 922 229 1012T528 1102Q653
+1102 741 1049T870 889Q930 989 1024 1045T1235 1102Q1431 1102 1543 982T1658 644V538H901V509Q901 308 997 195T1265 81Q1450 81 1589 199L1636 112Q1491 -20 1265 -20ZM458 80Q549 80 642 126T781 236V536H525Q388 536 302 475T207 309L206 289Q206 192 271
+136T458 80ZM1235 1000Q1103 1000 1013 902T904 636H1539V667Q1539 821 1459 910T1235 1000Z" />
+<glyph unicode="&#xe7;" horiz-adv-x="1055" d="M556 81Q681 81 765 151T857 334H972Q967 235 910 154T759 26T556 -20Q343 -20 219 128T94 526V562Q94 722 150 845T310 1035T555 1102Q733 1102 848 996T972 717H857Q849 844 766 922T555 1000Q393 1000 304 883T214
+555V520Q214 313 303 197T556 81ZM589 -9L577 -73Q742 -94 742 -244Q742 -336 663 -390T448 -444L441 -366Q530 -366 586 -334T643 -246Q643 -188 600 -166T467 -136L496 -9H589Z" />
+<glyph unicode="&#xe8;" horiz-adv-x="1058" d="M575 -20Q437 -20 326 48T152 237T90 510V553Q90 709 150 834T319 1030T553 1102Q750 1102 865 968T981 600V533H209V510Q209 326 314 204T580 81Q676 81 749 116T883 228L958 171Q826 -20 575 -20ZM553 1000Q418
+1000 326 901T213 635H862V648Q857 804 773 902T553 1000ZM640 1245H531L305 1524H449L640 1245Z" />
+<glyph unicode="&#xe9;" horiz-adv-x="1058" d="M575 -20Q437 -20 326 48T152 237T90 510V553Q90 709 150 834T319 1030T553 1102Q750 1102 865 968T981 600V533H209V510Q209 326 314 204T580 81Q676 81 749 116T883 228L958 171Q826 -20 575 -20ZM553 1000Q418
+1000 326 901T213 635H862V648Q857 804 773 902T553 1000ZM686 1524H831L600 1245H495L686 1524Z" />
+<glyph unicode="&#xea;" horiz-adv-x="1058" d="M575 -20Q437 -20 326 48T152 237T90 510V553Q90 709 150 834T319 1030T553 1102Q750 1102 865 968T981 600V533H209V510Q209 326 314 204T580 81Q676 81 749 116T883 228L958 171Q826 -20 575 -20ZM553 1000Q418
+1000 326 901T213 635H862V648Q857 804 773 902T553 1000ZM845 1268V1258H734L566 1441L399 1258H290V1271L528 1526H604L845 1268Z" />
+<glyph unicode="&#xeb;" horiz-adv-x="1058" d="M575 -20Q437 -20 326 48T152 237T90 510V553Q90 709 150 834T319 1030T553 1102Q750 1102 865 968T981 600V533H209V510Q209 326 314 204T580 81Q676 81 749 116T883 228L958 171Q826 -20 575 -20ZM553 1000Q418
+1000 326 901T213 635H862V648Q857 804 773 902T553 1000ZM273 1396Q273 1430 294 1453T355 1477Q393 1477 415 1454T438 1396Q438 1363 416 1340T355 1317Q316 1317 295 1340T273 1396ZM710 1395Q710 1429 732 1452T792 1476Q831 1476 853 1453T875 1395Q875 1362
+853 1339T792 1316Q754 1316 732 1339T710 1395Z" />
+<glyph unicode="&#xec;" horiz-adv-x="456" d="M288 0H168V1082H288V0ZM305 1233H196L-30 1512H114L305 1233Z" />
+<glyph unicode="&#xed;" horiz-adv-x="456" d="M288 0H168V1082H288V0ZM350 1768H495L264 1489H159L350 1768Z" />
+<glyph unicode="&#xee;" horiz-adv-x="456" d="M288 0H168V1082H288V0ZM510 1256V1246H399L231 1429L64 1246H-45V1259L193 1514H269L510 1256Z" />
+<glyph unicode="&#xef;" horiz-adv-x="456" d="M288 0H168V1082H288V0ZM-62 1384Q-62 1418 -41 1441T20 1465Q58 1465 80 1442T103 1384Q103 1351 81 1328T20 1305Q-19 1305 -40 1328T-62 1384ZM375 1383Q375 1417 397 1440T457 1464Q496 1464 518 1441T540 1383Q540
+1350 518 1327T457 1304Q419 1304 397 1327T375 1383Z" />
+<glyph unicode="&#xf0;" horiz-adv-x="1191" d="M811 1303Q1049 1053 1055 645V535Q1055 376 999 249T842 51T615 -20Q485 -20 379 41T211 216T149 466Q149 695 268 830T587 965Q687 965 773 927T919 821Q877 1072 709 1240L484 1101L433 1174L639 1302Q502 1408
+296 1475L335 1578Q577 1506 744 1366L938 1487L989 1414L811 1303ZM935 625L933 682Q894 765 807 813T609 861Q448 861 359 756T269 466Q269 363 314 274T438 134T619 83Q760 83 847 207T935 543V625Z" />
+<glyph unicode="&#xf1;" horiz-adv-x="1125" d="M270 1082L274 897Q335 997 426 1049T627 1102Q801 1102 886 1004T972 710V0H853V711Q852 856 792 927T598 998Q487 998 402 929T275 741V0H156V1082H270ZM916 1493Q916 1397 860 1339T721 1280Q645 1280 573 1335T443
+1390Q396 1390 365 1359T333 1272L243 1275Q243 1367 297 1427T438 1487Q486 1487 520 1470T584 1432T644 1394T716 1377Q762 1377 794 1410T827 1499L916 1493Z" />
+<glyph unicode="&#xf2;" horiz-adv-x="1147" d="M90 557Q90 713 150 838T321 1032T572 1102Q788 1102 922 951T1056 549V524Q1056 367 996 242T825 48T574 -20Q359 -20 225 131T90 533V557ZM210 524Q210 330 310 206T574 81Q736 81 836 205T937 534V557Q937 681
+891 784T762 943T572 1000Q412 1000 311 875T210 546V524ZM645 1245H536L310 1524H454L645 1245Z" />
+<glyph unicode="&#xf3;" horiz-adv-x="1147" d="M90 557Q90 713 150 838T321 1032T572 1102Q788 1102 922 951T1056 549V524Q1056 367 996 242T825 48T574 -20Q359 -20 225 131T90 533V557ZM210 524Q210 330 310 206T574 81Q736 81 836 205T937 534V557Q937 681
+891 784T762 943T572 1000Q412 1000 311 875T210 546V524ZM691 1524H836L605 1245H500L691 1524Z" />
+<glyph unicode="&#xf4;" horiz-adv-x="1147" d="M90 557Q90 713 150 838T321 1032T572 1102Q788 1102 922 951T1056 549V524Q1056 367 996 242T825 48T574 -20Q359 -20 225 131T90 533V557ZM210 524Q210 330 310 206T574 81Q736 81 836 205T937 534V557Q937 681
+891 784T762 943T572 1000Q412 1000 311 875T210 546V524ZM850 1268V1258H739L571 1441L404 1258H295V1271L533 1526H609L850 1268Z" />
+<glyph unicode="&#xf5;" horiz-adv-x="1147" d="M90 557Q90 713 150 838T321 1032T572 1102Q788 1102 922 951T1056 549V524Q1056 367 996 242T825 48T574 -20Q359 -20 225 131T90 533V557ZM210 524Q210 330 310 206T574 81Q736 81 836 205T937 534V557Q937 681
+891 784T762 943T572 1000Q412 1000 311 875T210 546V524ZM912 1493Q912 1397 856 1339T717 1280Q641 1280 569 1335T439 1390Q392 1390 361 1359T329 1272L239 1275Q239 1367 293 1427T434 1487Q482 1487 516 1470T580 1432T640 1394T712 1377Q758 1377 790 1410T823
+1499L912 1493Z" />
+<glyph unicode="&#xf6;" horiz-adv-x="1147" d="M90 557Q90 713 150 838T321 1032T572 1102Q788 1102 922 951T1056 549V524Q1056 367 996 242T825 48T574 -20Q359 -20 225 131T90 533V557ZM210 524Q210 330 310 206T574 81Q736 81 836 205T937 534V557Q937 681
+891 784T762 943T572 1000Q412 1000 311 875T210 546V524ZM278 1396Q278 1430 299 1453T360 1477Q398 1477 420 1454T443 1396Q443 1363 421 1340T360 1317Q321 1317 300 1340T278 1396ZM715 1395Q715 1429 737 1452T797 1476Q836 1476 858 1453T880 1395Q880 1362
+858 1339T797 1316Q759 1316 737 1339T715 1395Z" />
+<glyph unicode="&#xf7;" horiz-adv-x="1164" d="M1070 644H72V760H1070V644ZM495 1088Q495 1123 517 1147T579 1171T642 1147T665 1088T642 1030T579 1006T517 1029T495 1088ZM495 291Q495 326 517 350T579 374T642 350T665 291T642 233T579 210T517 233T495 291Z" />
+<glyph unicode="&#xf8;" horiz-adv-x="1140" d="M89 557Q89 713 149 838T320 1032T571 1102Q685 1102 785 1054L863 1214H957L857 1010Q951 938 1003 821T1055 557V524Q1055 368 994 242T823 48T573 -20Q465 -20 373 21L294 -140H200L299 63Q199 134 144 253T89
+524V557ZM208 524Q208 414 243 319T348 163L737 957Q662 1000 571 1000Q410 1000 309 875T208 546V524ZM935 557Q935 660 902 751T806 905L419 115Q487 81 573 81Q734 81 834 205T935 534V557Z" />
+<glyph unicode="&#xf9;" horiz-adv-x="1125" d="M852 137Q744 -20 507 -20Q334 -20 244 80T152 378V1082H271V393Q271 84 521 84Q781 84 850 299V1082H970V0H854L852 137ZM647 1245H538L312 1524H456L647 1245Z" />
+<glyph unicode="&#xfa;" horiz-adv-x="1125" d="M852 137Q744 -20 507 -20Q334 -20 244 80T152 378V1082H271V393Q271 84 521 84Q781 84 850 299V1082H970V0H854L852 137ZM693 1524H838L607 1245H502L693 1524Z" />
+<glyph unicode="&#xfb;" horiz-adv-x="1125" d="M852 137Q744 -20 507 -20Q334 -20 244 80T152 378V1082H271V393Q271 84 521 84Q781 84 850 299V1082H970V0H854L852 137ZM852 1268V1258H741L573 1441L406 1258H297V1271L535 1526H611L852 1268Z" />
+<glyph unicode="&#xfc;" horiz-adv-x="1125" d="M852 137Q744 -20 507 -20Q334 -20 244 80T152 378V1082H271V393Q271 84 521 84Q781 84 850 299V1082H970V0H854L852 137ZM280 1396Q280 1430 301 1453T362 1477Q400 1477 422 1454T445 1396Q445 1363 423 1340T362
+1317Q323 1317 302 1340T280 1396ZM717 1395Q717 1429 739 1452T799 1476Q838 1476 860 1453T882 1395Q882 1362 860 1339T799 1316Q761 1316 739 1339T717 1395Z" />
+<glyph unicode="&#xfd;" horiz-adv-x="973" d="M499 172L815 1082H944L482 -184L458 -240Q369 -437 183 -437Q140 -437 91 -423L90 -324L152 -330Q240 -330 294 -287T387 -137L440 9L32 1082H163L499 172ZM633 1524H778L547 1245H442L633 1524Z" />
+<glyph unicode="&#xfe;" horiz-adv-x="1150" d="M1031 530Q1031 277 919 129T618 -20Q397 -20 279 136V-416H159V1536H279V932Q396 1102 616 1102Q808 1102 919 956T1031 548V530ZM911 551Q911 758 826 878T589 998Q479 998 400 945T279 791V270Q321 180 400 131T591
+81Q742 81 826 201T911 551Z" />
+<glyph unicode="&#xff;" horiz-adv-x="973" d="M499 172L815 1082H944L482 -184L458 -240Q369 -437 183 -437Q140 -437 91 -423L90 -324L152 -330Q240 -330 294 -287T387 -137L440 9L32 1082H163L499 172ZM220 1396Q220 1430 241 1453T302 1477Q340 1477 362 1454T385
+1396Q385 1363 363 1340T302 1317Q263 1317 242 1340T220 1396ZM657 1395Q657 1429 679 1452T739 1476Q778 1476 800 1453T822 1395Q822 1362 800 1339T739 1316Q701 1316 679 1339T657 1395Z" />
+<glyph unicode="&#x2013;" horiz-adv-x="1334" d="M1417 686H415V788H1417V686Z" />
+<glyph unicode="&#x2014;" horiz-adv-x="1580" d="M1462 686H126V788H1462V686Z" />
+<glyph unicode="&#x2018;" horiz-adv-x="364" d="M238 1554L310 1503Q220 1385 217 1249V1121H98V1233Q98 1325 135 1410T238 1554Z" />
+<glyph unicode="&#x2019;" horiz-adv-x="364" d="M133 1099L62 1151Q152 1272 155 1405V1536H273V1435Q273 1226 133 1099Z" />
+<glyph unicode="&#x201a;" horiz-adv-x="353" d="M112 -231L41 -179Q124 -68 132 51L133 205H252V104Q252 -104 112 -231Z" />
+<glyph unicode="&#x201c;" horiz-adv-x="612" d="M239 1554L311 1503Q221 1385 218 1249V1121H99V1233Q99 1325 136 1410T239 1554ZM490 1554L562 1503Q472 1385 469 1249V1121H350V1233Q350 1325 387 1410T490 1554Z" />
+<glyph unicode="&#x201d;" horiz-adv-x="617" d="M139 1099L68 1151Q158 1272 161 1405V1536H279V1435Q279 1226 139 1099ZM383 1099L312 1151Q402 1272 405 1405V1536H523V1435Q523 1226 383 1099Z" />
+<glyph unicode="&#x201e;" horiz-adv-x="593" d="M112 -240L41 -188Q130 -65 133 73V236H252V106Q252 -111 112 -240ZM346 -240L275 -188Q363 -66 366 73V236H486V106Q486 -111 346 -240Z" />
+<glyph unicode="&#x2022;" horiz-adv-x="662" d="M146 752Q146 831 197 881T331 931Q413 931 464 883T517 757V717Q517 636 466 588T332 540Q248 540 197 589T146 719V752Z" />
+<glyph unicode="&#x2039;" horiz-adv-x="609" d="M232 555L508 167H403L108 546V565L403 944H508L232 555Z" />
+<glyph unicode="&#x203a;" horiz-adv-x="609" d="M203 944L498 560V541L203 162H97L373 550L97 944H203Z" />
+</font>
+</defs>
+</svg>

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-300.ttf
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-300.ttf b/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-300.ttf
new file mode 100644
index 0000000..66bc5ab
Binary files /dev/null and b/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-300.ttf differ

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-300.woff
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-300.woff b/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-300.woff
new file mode 100644
index 0000000..7e6c479
Binary files /dev/null and b/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-300.woff differ

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-300.woff2
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-300.woff2 b/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-300.woff2
new file mode 100644
index 0000000..c34c128
Binary files /dev/null and b/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-300.woff2 differ

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-regular.eot
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-regular.eot b/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-regular.eot
new file mode 100644
index 0000000..d26bc8f
Binary files /dev/null and b/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-regular.eot differ


[16/20] ambari git commit: AMBARI-19321 : Hive View 2.0 - Minimal view for Hive which includes new UI changes. Also made changes in poms as required (nitirajrathore)

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/DefaultSupplier.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/DefaultSupplier.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/DefaultSupplier.java
new file mode 100644
index 0000000..a9edbe7
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/DefaultSupplier.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal;
+
+import com.google.common.base.Supplier;
+
+/**
+ * Supplies instances of type T: either a pre-built instance passed in at
+ * construction time, or a new object created through the class's
+ * no-argument constructor.
+ * @param <T> type of the supplied instance
+ */
+public class DefaultSupplier<T> implements Supplier<T>{
+
+    private Class<T> clazz;
+    T instance;
+
+    public DefaultSupplier(T instance) {
+        this.instance = instance;
+    }
+
+    public DefaultSupplier(Class<T> clazz) throws IllegalAccessException, InstantiationException {
+        this.clazz = clazz;
+    }
+
+    /**
+     * Get the instance
+     * @return the wrapped instance, or a new instance of the supplied class;
+     *         null if the class cannot be instantiated
+     */
+    @Override
+    public T get() {
+        if(clazz != null){
+            try {
+                return clazz.newInstance();
+            } catch (InstantiationException e) {
+                e.printStackTrace();
+                return null;
+            } catch (IllegalAccessException e) {
+                return null;
+            }
+        } else {
+            return instance;
+        }
+    }
+}
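
For reference (not part of the patch): a minimal usage sketch of the DefaultSupplier above. StringBuilder is only an illustrative type; any class with an accessible no-arg constructor works the same way.

    import org.apache.ambari.view.hive20.internal.DefaultSupplier;

    public class DefaultSupplierExample {
      public static void main(String[] args) throws Exception {
        // Wrap an existing object: get() always returns that same instance.
        DefaultSupplier<StringBuilder> fromInstance =
            new DefaultSupplier<>(new StringBuilder("ready"));
        System.out.println(fromInstance.get());        // prints "ready"

        // Wrap a class: get() creates a fresh object via its no-arg constructor.
        DefaultSupplier<StringBuilder> fromClass =
            new DefaultSupplier<>(StringBuilder.class);
        System.out.println(fromClass.get().length());  // prints 0
      }
    }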

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/Either.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/Either.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/Either.java
new file mode 100644
index 0000000..9b47148
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/Either.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal;
+
+import com.google.common.base.Optional;
+
+/**
+ * Simple implementation of a container class which can
+ * hold one of two values.
+ * <p>
+ * Callers should check whether the value is left or right before
+ * trying to get it.
+ *
+ * @param <L> Left value
+ * @param <R> Right value
+ */
+public class Either<L, R> {
+
+  private final Optional<L> left;
+  private final Optional<R> right;
+
+
+  public boolean isLeft() {
+    return left.isPresent() && !right.isPresent();
+  }
+
+  public boolean isRight() {
+    return !left.isPresent() && right.isPresent();
+  }
+
+  public boolean isNone() { return  !(left.isPresent() || right.isPresent()); }
+
+  public L getLeft() {
+    return left.orNull();
+  }
+
+  public R getRight() {
+    return right.orNull();
+  }
+
+
+  private Either(Optional<L> left, Optional<R> right) {
+    this.left = left;
+    this.right = right;
+  }
+
+
+  public static <L, R> Either<L, R> left(L value) {
+    return new Either<>(Optional.of(value), Optional.<R>absent());
+  }
+
+  public static <L, R> Either<L, R> right(R value) {
+    return new Either<>(Optional.<L>absent(), Optional.of(value));
+  }
+
+  public static <L, R> Either<L, R> none() {
+    return new Either<>(Optional.<L>absent(), Optional.<R>absent());
+  }
+
+}
+
+
+
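
For reference (not part of the patch): a small usage sketch of the Either container above; the division example is purely illustrative.

    import org.apache.ambari.view.hive20.internal.Either;

    public class EitherExample {
      // Produce either a quotient (left) or an error message (right).
      static Either<Integer, String> divide(int a, int b) {
        return b == 0
            ? Either.<Integer, String>right("division by zero")
            : Either.<Integer, String>left(a / b);
      }

      public static void main(String[] args) {
        Either<Integer, String> result = divide(10, 2);
        // Check which side is present before reading it, as the javadoc advises.
        if (result.isLeft()) {
          System.out.println("value = " + result.getLeft());
        } else if (result.isRight()) {
          System.out.println("error = " + result.getRight());
        }
      }
    }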

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HdfsApiSupplier.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HdfsApiSupplier.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HdfsApiSupplier.java
new file mode 100644
index 0000000..e66b9ab
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HdfsApiSupplier.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal;
+
+import com.google.common.base.Optional;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.ambari.view.utils.hdfs.HdfsApiException;
+import org.apache.ambari.view.utils.hdfs.HdfsUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+public class HdfsApiSupplier implements ContextSupplier<Optional<HdfsApi>> {
+
+  protected final Logger LOG =
+    LoggerFactory.getLogger(getClass());
+
+  private static final Map<String, HdfsApi> hdfsApiMap = new ConcurrentHashMap<>();
+  private final Object lock = new Object();
+
+  @Override
+  public Optional<HdfsApi> get(ViewContext context) {
+    try {
+      if(!hdfsApiMap.containsKey(getKey(context))) {
+        synchronized (lock) {
+          if(!hdfsApiMap.containsKey(getKey(context))) {
+            LOG.debug("Creating HDFSApi instance for Viewname: {}, Instance Name: {}", context.getViewName(), context.getInstanceName());
+            HdfsApi api = HdfsUtil.connectToHDFSApi(context);
+            hdfsApiMap.put(getKey(context), api);
+            return Optional.of(api);
+          }
+        }
+      }
+      return Optional.of(hdfsApiMap.get(getKey(context)));
+    } catch (HdfsApiException e) {
+      LOG.error("Cannot get the HDFS API", e);
+      return Optional.absent();
+    }
+  }
+
+  private String getKey(ViewContext context) {
+    return context.getUsername() + ":" + context.getInstanceName();
+  }
+}
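
For reference (not part of the patch): a sketch of how a caller might obtain an HdfsApi through the supplier above. The ViewContext is assumed to be injected by the hosting view framework.

    import com.google.common.base.Optional;
    import org.apache.ambari.view.ViewContext;
    import org.apache.ambari.view.hive20.internal.HdfsApiSupplier;
    import org.apache.ambari.view.utils.hdfs.HdfsApi;

    public class HdfsApiSupplierExample {
      private final HdfsApiSupplier hdfsApiSupplier = new HdfsApiSupplier();

      // viewContext is assumed to come from the view instance using this class
      public HdfsApi hdfsApiFor(ViewContext viewContext) {
        Optional<HdfsApi> api = hdfsApiSupplier.get(viewContext);
        if (!api.isPresent()) {
          // the supplier returns absent when the HDFS connection cannot be created
          throw new IllegalStateException("HDFS API is not available for this view instance");
        }
        return api.get();
      }
    }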

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HiveConnectionWrapper.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HiveConnectionWrapper.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HiveConnectionWrapper.java
new file mode 100644
index 0000000..f5f1ff1
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HiveConnectionWrapper.java
@@ -0,0 +1,152 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal;
+
+import com.google.common.base.Optional;
+import com.google.common.base.Supplier;
+import org.apache.ambari.view.hive20.AuthParams;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hive.jdbc.HiveConnection;
+
+import java.io.IOException;
+import java.lang.reflect.UndeclaredThrowableException;
+import java.security.PrivilegedExceptionAction;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+
+/**
+ * Composition over a Hive JDBC connection.
+ * This class only provides the connection; callers are expected
+ * to run their own JDBC statements over it.
+ */
+public class HiveConnectionWrapper implements Connectable, Supplier<HiveConnection> {
+
+  private static final String DRIVER_NAME = "org.apache.hive.jdbc.HiveDriver";
+  public static final String SUFFIX = "validating the login";
+  private final String jdbcUrl;
+  private final String username;
+  private final String password;
+  private final AuthParams authParams;
+
+  private UserGroupInformation ugi;
+
+  private HiveConnection connection = null;
+  private boolean authFailed;
+
+  public HiveConnectionWrapper(String jdbcUrl, String username, String password, AuthParams authParams) {
+    this.jdbcUrl = jdbcUrl;
+    this.username = username;
+    this.password = password;
+    this.authParams = authParams;
+  }
+
+  @Override
+  public void connect() throws ConnectionException {
+    try {
+      Class.forName(DRIVER_NAME);
+    } catch (ClassNotFoundException e) {
+      throw new ConnectionException(e, "Cannot load the hive JDBC driver");
+    }
+
+    try {
+      ugi = UserGroupInformation.createProxyUser(username, authParams.getProxyUser());
+    } catch (IOException e) {
+      throw new ConnectionException(e, "Cannot set kerberos authentication for getting connection.");
+    }
+
+    try {
+      Connection conn = ugi.doAs(new PrivilegedExceptionAction<Connection>() {
+        @Override
+        public Connection run() throws Exception {
+          return DriverManager.getConnection(jdbcUrl, username, password);
+        }
+      });
+      connection = (HiveConnection) conn;
+    } catch (UndeclaredThrowableException exception) {
+      // Check if the reason was an auth error
+      Throwable undeclaredThrowable = exception.getUndeclaredThrowable();
+      if (undeclaredThrowable instanceof SQLException) {
+        SQLException sqlException = (SQLException) undeclaredThrowable;
+        if (isLoginError(sqlException))
+          authFailed = true;
+        throw new ConnectionException(sqlException, "Cannot open a hive connection with connect string " + jdbcUrl);
+      }
+
+    } catch (IOException | InterruptedException e) {
+      throw new ConnectionException(e, "Cannot open a hive connection with connect string " + jdbcUrl);
+    }
+
+  }
+
+  @Override
+  public void reconnect() throws ConnectionException {
+
+  }
+
+  @Override
+  public void disconnect() throws ConnectionException {
+    if (connection != null) {
+      try {
+        connection.close();
+      } catch (SQLException e) {
+        throw new ConnectionException(e, "Cannot close the hive connection with connect string " + jdbcUrl);
+      }
+    }
+  }
+
+  private boolean isLoginError(SQLException ce) {
+    // guard against SQLExceptions that carry no cause or no message
+    Throwable cause = ce.getCause();
+    return cause != null && cause.getMessage() != null
+      && cause.getMessage().toLowerCase().endsWith(SUFFIX);
+  }
+
+  /**
+   * True when the last connection attempt failed authentication
+   *
+   * @return true if the connection attempt was rejected as unauthorized
+   */
+  @Override
+  public boolean isUnauthorized() {
+    return authFailed;
+  }
+
+  public Optional<HiveConnection> getConnection() {
+    return Optional.fromNullable(connection);
+  }
+
+  @Override
+  public boolean isOpen() {
+    try {
+      return connection != null && !connection.isClosed() && connection.isValid(100);
+    } catch (SQLException e) {
+      // in case of an SQL error just report the connection as not open
+      return false;
+    }
+  }
+
+  /**
+   * Retrieves an instance of the appropriate type. The returned object may or
+   * may not be a new instance, depending on the implementation.
+   *
+   * @return an instance of the appropriate type
+   */
+  @Override
+  public HiveConnection get() {
+    // expose the underlying connection; null until connect() has succeeded
+    return connection;
+  }
+}
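
For reference (not part of the patch): the expected call sequence for the wrapper above. The JDBC URL and credentials are placeholders, and AuthParams is assumed to be built elsewhere from the view context.

    import org.apache.ambari.view.hive20.AuthParams;
    import org.apache.ambari.view.hive20.internal.HiveConnectionWrapper;
    import org.apache.hive.jdbc.HiveConnection;

    public class HiveConnectionWrapperExample {
      public void runSmokeTest(AuthParams authParams) throws Exception {
        HiveConnectionWrapper wrapper = new HiveConnectionWrapper(
            "jdbc:hive2://example-host:10000/default",   // placeholder connect string
            "hive-user", "hive-password", authParams);   // placeholder credentials
        wrapper.connect();
        try {
          if (wrapper.getConnection().isPresent()) {
            HiveConnection connection = wrapper.getConnection().get();
            // run JDBC statements against 'connection' here
            System.out.println("connection open: " + wrapper.isOpen());
          }
        } finally {
          wrapper.disconnect();
        }
      }
    }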

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HiveQuery.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HiveQuery.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HiveQuery.java
new file mode 100644
index 0000000..5ab384e
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HiveQuery.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal;
+
+
+import com.google.common.base.Function;
+import com.google.common.collect.FluentIterable;
+
+import javax.annotation.Nullable;
+import java.util.Arrays;
+import java.util.Collection;
+
+/**
+ * Holder for a query submitted by the user.
+ * The submitted text may contain multiple Hive queries separated by ';'.
+ */
+public class HiveQuery {
+
+    private String query;
+
+    public HiveQuery(String query) {
+        this.query = query;
+    }
+
+    public HiveQueries fromMultiLineQuery(String multiLineQuery){
+        return new HiveQueries(multiLineQuery);
+    }
+
+
+    public static class HiveQueries{
+
+        static final String QUERY_SEP = ";";
+        Collection<HiveQuery> hiveQueries;
+
+        private HiveQueries(String userQuery) {
+            hiveQueries = FluentIterable.from(Arrays.asList(userQuery.split(QUERY_SEP)))
+                    .transform(new Function<String, HiveQuery>() {
+                        @Nullable
+                        @Override
+                        public HiveQuery apply(@Nullable String input) {
+                            return new HiveQuery(input.trim());
+                        }
+                    }).toList();
+        }
+
+
+
+
+
+    }
+
+
+
+
+};
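
For reference (not part of the patch): a sketch of splitting a multi-statement string with the holder above. fromMultiLineQuery is an instance method, so a HiveQuery is created first; the example lives in the same package because the queries collection is package-private.

    package org.apache.ambari.view.hive20.internal;

    public class HiveQueryExample {
      public static void main(String[] args) {
        String script = "use default; select count(*) from test";
        // fromMultiLineQuery splits on ';' and trims each individual statement
        HiveQuery.HiveQueries queries = new HiveQuery(script).fromMultiLineQuery(script);
        System.out.println(queries.hiveQueries.size() + " statements");  // prints "2 statements"
      }
    }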

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HiveResult.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HiveResult.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HiveResult.java
new file mode 100644
index 0000000..6e47e46
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HiveResult.java
@@ -0,0 +1,160 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal;
+
+import com.google.common.collect.Lists;
+
+import java.math.RoundingMode;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.text.DecimalFormat;
+import java.text.NumberFormat;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+import java.util.NoSuchElementException;
+
+public class HiveResult implements Iterator<HiveResult.Row> {
+
+    public static final String NULL = "NULL";
+    private static final int DEFAULT_BATCH_SIZE = 50;
+    private ResultSetMetaData metaData; // per-instance, not shared across results
+    private Row colNames;
+    private NumberFormat nf = new DecimalFormat();
+    private List<Row> rows = Lists.newArrayList();
+    private int cursor = 0; // read position for the Iterator implementation
+
+    public HiveResult(ResultSet rs) throws SQLException {
+        nf.setRoundingMode(RoundingMode.FLOOR);
+        nf.setMinimumFractionDigits(0);
+        nf.setMaximumFractionDigits(2);
+        metaData = rs.getMetaData();
+        int columnCount = metaData.getColumnCount();
+        colNames = new Row(columnCount);
+        int index = 0;
+        while (rs.next() && index <DEFAULT_BATCH_SIZE){
+            index ++;
+            rows.add(new Row(columnCount,rs));
+        }
+
+
+    }
+
+    public List<Row> getRows(){
+        return rows;
+    }
+
+    public List<Row> getData() {
+        return rows;
+    }
+
+    /**
+     * Iterates over the rows fetched into this batch
+     *
+     * @return {@code true} if the iteration has more elements
+     */
+    @Override
+    public boolean hasNext() {
+        return cursor < rows.size();
+    }
+
+    /**
+     * Returns the next row in the iteration.
+     *
+     * @return the next element in the iteration
+     */
+    @Override
+    public Row next() {
+        if (cursor >= rows.size()) {
+            throw new NoSuchElementException();
+        }
+        return rows.get(cursor++);
+    }
+
+    /**
+     * Removes from the underlying collection the last element returned
+     * by this iterator (optional operation).  This method can be called
+     * only once per call to {@link #next}.  The behavior of an iterator
+     * is unspecified if the underlying collection is modified while the
+     * iteration is in progress in any way other than by calling this
+     * method.
+     *
+     * @throws UnsupportedOperationException if the {@code remove}
+     *                                       operation is not supported by this iterator
+     * @throws IllegalStateException         if the {@code next} method has not
+     *                                       yet been called, or the {@code remove} method has already
+     *                                       been called after the last call to the {@code next}
+     *                                       method
+     */
+    @Override
+    public void remove() {
+        throw new UnsupportedOperationException();
+    }
+
+    public Row getColNames() {
+        return colNames;
+    }
+
+
+    @Override
+    public String toString() {
+        return "HiveResult{" +
+                "colNames=" + colNames +
+                ", rows=" + rows +
+                '}';
+    }
+
+    public class Row {
+        String[] values;
+
+        public Row(int size) throws SQLException {
+            values = new String[size];
+            for (int i = 0; i < size; i++) {
+                values[i] = metaData.getColumnLabel(i + 1);
+            }
+        }
+
+
+        public Row(int size, ResultSet rs) throws SQLException {
+            values = new String[size];
+            for (int i = 0; i < size; i++) {
+                if (nf != null) {
+                    Object object = rs.getObject(i + 1);
+                    if (object == null) {
+                        values[i] = null;
+                    } else if (object instanceof Number) {
+                        values[i] = nf.format(object);
+                    } else {
+                        values[i] = object.toString();
+                    }
+                } else {
+                    values[i] = rs.getString(i + 1);
+                }
+                values[i] = values[i] == null ? NULL : values[i];
+
+            }
+
+        }
+
+        @Override
+        public String toString() {
+            return "Row{" +
+                    "values=" + Arrays.toString(values) +
+                    '}';
+        }
+    }
+
+
+}
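
For reference (not part of the patch): a sketch of wrapping a JDBC ResultSet with the class above. The connect string and query are placeholders, and the Hive JDBC driver is assumed to be on the classpath.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;
    import org.apache.ambari.view.hive20.internal.HiveResult;

    public class HiveResultExample {
      public static void main(String[] args) throws Exception {
        try (Connection connection =
                 DriverManager.getConnection("jdbc:hive2://example-host:10000/default");
             Statement statement = connection.createStatement();
             ResultSet rs = statement.executeQuery("select * from test limit 10")) {
          HiveResult result = new HiveResult(rs);     // buffers at most the first 50 rows
          System.out.println(result.getColNames());   // header row with the column labels
          for (HiveResult.Row row : result.getRows()) {
            System.out.println(row);                  // formatted values, NULL for missing ones
          }
        }
      }
    }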

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HiveTask.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HiveTask.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HiveTask.java
new file mode 100644
index 0000000..243de7b
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HiveTask.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal;
+
+public interface HiveTask {
+
+    /**
+     * The task id for this task
+     * @return task Id
+     */
+    Long getId();
+
+    /**
+     * The user for which this task was submitted
+     * @return user name of the submitter
+     */
+    String getUser();
+
+    /**
+     * The view instance tied to this task
+     * @return view instance name
+     */
+    String getInstance();
+
+    /**
+     * Connection properties pulled from the view context and request
+     * @return connection properties for this task
+     */
+
+    //Connectable getConnectionClass();
+
+    ConnectionProperties getConnectionProperties();
+
+    HiveQuery.HiveQueries getQueries();
+
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HiveTaskMessage.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HiveTaskMessage.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HiveTaskMessage.java
new file mode 100644
index 0000000..532cea7
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HiveTaskMessage.java
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal;
+
+public class HiveTaskMessage implements HiveTask {
+
+    private Long id;
+    private String instance;
+    private ConnectionProperties connectionProps;
+    private HiveQuery.HiveQueries queries;
+    //private Connectable connectable = new HiveConnectionWrapper(connectMessage);
+
+
+    public void setConnectionProps(ConnectionProperties connectionProps) {
+        this.connectionProps = connectionProps;
+    }
+
+    public void setId(Long id) {
+        this.id = id;
+    }
+
+    public void setInstance(String instance) {
+        this.instance = instance;
+    }
+
+    public void setQueries(HiveQuery.HiveQueries queries) {
+        this.queries = queries;
+    }
+
+
+    /*public void setConnectable(Connectable connectable) {
+        this.connectable = connectable;
+    }*/
+
+    /**
+     * The task id for this task
+     *
+     * @return task Id
+     */
+    @Override
+    public Long getId() {
+        return id;
+    }
+
+    /**
+     * The user for which this task was submitted
+     *
+     * @return the user name
+     */
+    @Override
+    public String getUser() {
+        return connectionProps.getUserName();
+    }
+
+    /**
+     * The view instance tied to this task
+     *
+     * @return the view instance name
+     */
+    @Override
+    public String getInstance() {
+        return instance;
+    }
+
+    /**
+     * Connection properties pulled from the view context and request
+     *
+     * @return
+     */
+    /*@Override
+    public Connectable getConnectionClass() {
+        return connectable;
+    }
+*/
+    /**
+     * Connection properties pulled from the view context and request
+     *
+     * @return the connection properties
+     */
+    @Override
+    public ConnectionProperties getConnectionProperties() {
+        return connectionProps;
+    }
+
+    @Override
+    public HiveQuery.HiveQueries getQueries() {
+        return queries;
+    }
+
+
+    @Override
+    public String toString() {
+        return "HiveTaskMessage{" +
+                "connectionProps=" + connectionProps +
+                ", id=" + id +
+                ", instance='" + instance + '\'' +
+                ", queries=" + queries +
+                '}';
+    }
+
+
+}
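
As a quick illustration of the HiveTask contract above, the following sketch assembles a HiveTaskMessage. It is not part of the patch: the connectionProperties and queries instances are assumed to be built elsewhere in this change, and the id/instance values are placeholders.

    // Illustrative sketch only -- connectionProperties and queries are assumed to be
    // constructed elsewhere in this patch; the id and instance values are placeholders.
    HiveTaskMessage task = new HiveTaskMessage();
    task.setId(1L);
    task.setInstance("HIVE_VIEW_INSTANCE");
    task.setConnectionProps(connectionProperties); // getUser() is derived from these properties
    task.setQueries(queries);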

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/SafeViewContext.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/SafeViewContext.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/SafeViewContext.java
new file mode 100644
index 0000000..2f9f337
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/SafeViewContext.java
@@ -0,0 +1,179 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal;
+
+import org.apache.ambari.view.AmbariStreamProvider;
+import org.apache.ambari.view.DataStore;
+import org.apache.ambari.view.HttpImpersonator;
+import org.apache.ambari.view.ImpersonatorSetting;
+import org.apache.ambari.view.ResourceProvider;
+import org.apache.ambari.view.SecurityException;
+import org.apache.ambari.view.URLConnectionProvider;
+import org.apache.ambari.view.URLStreamProvider;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.ViewController;
+import org.apache.ambari.view.ViewDefinition;
+import org.apache.ambari.view.ViewInstanceDefinition;
+import org.apache.ambari.view.cluster.Cluster;
+
+import java.util.Collection;
+import java.util.Map;
+
+/**
+ * Wrapper around ViewContext. It delegates all method calls to the wrapped ViewContext object except for
+ * #getUsername() and #getLoggedinUser(). At creation time, the username and logged-in user are stored
+ * in instance variables. This is done to bypass the ThreadLocal variables implicitly used by the actual
+ * ViewContext, so objects of this class should be used inside the ActorSystem.
+ */
+public class SafeViewContext implements ViewContext {
+  private final ViewContext viewContext;
+  private final String username;
+  private final String loggedinUser;
+
+  public SafeViewContext(ViewContext viewContext) {
+    this.viewContext = viewContext;
+    username = viewContext.getUsername();
+    loggedinUser = viewContext.getLoggedinUser();
+  }
+
+  @Override
+  public String getUsername() {
+    return username;
+  }
+
+  @Override
+  public String getLoggedinUser() {
+    return loggedinUser;
+  }
+
+  @Override
+  public void hasPermission(String userName, String permissionName) throws SecurityException {
+    viewContext.hasPermission(userName, permissionName);
+  }
+
+  @Override
+  public String getViewName() {
+    return viewContext.getViewName();
+  }
+
+  @Override
+  public ViewDefinition getViewDefinition() {
+    return viewContext.getViewDefinition();
+  }
+
+  @Override
+  public String getInstanceName() {
+    return viewContext.getInstanceName();
+  }
+
+  @Override
+  public ViewInstanceDefinition getViewInstanceDefinition() {
+    return viewContext.getViewInstanceDefinition();
+  }
+
+  @Override
+  public Map<String, String> getProperties() {
+    return viewContext.getProperties();
+  }
+
+  @Override
+  public void putInstanceData(String key, String value) {
+    viewContext.putInstanceData(key, value);
+  }
+
+  @Override
+  public String getInstanceData(String key) {
+    return viewContext.getInstanceData(key);
+  }
+
+  @Override
+  public Map<String, String> getInstanceData() {
+    return viewContext.getInstanceData();
+  }
+
+  @Override
+  public void removeInstanceData(String key) {
+    viewContext.removeInstanceData(key);
+  }
+
+  @Override
+  public String getAmbariProperty(String key) {
+    return viewContext.getAmbariProperty(key);
+  }
+
+  @Override
+  public ResourceProvider<?> getResourceProvider(String type) {
+    return viewContext.getResourceProvider(type);
+  }
+
+  @Override
+  public URLStreamProvider getURLStreamProvider() {
+    return viewContext.getURLStreamProvider();
+  }
+
+  @Override
+  public URLConnectionProvider getURLConnectionProvider() {
+    return viewContext.getURLConnectionProvider();
+  }
+
+  @Override
+  public AmbariStreamProvider getAmbariStreamProvider() {
+    return viewContext.getAmbariStreamProvider();
+  }
+
+  @Override
+  public AmbariStreamProvider getAmbariClusterStreamProvider() {
+    return viewContext.getAmbariClusterStreamProvider();
+  }
+
+  @Override
+  public DataStore getDataStore() {
+    return viewContext.getDataStore();
+  }
+
+  @Override
+  public Collection<ViewDefinition> getViewDefinitions() {
+    return viewContext.getViewDefinitions();
+  }
+
+  @Override
+  public Collection<ViewInstanceDefinition> getViewInstanceDefinitions() {
+    return viewContext.getViewInstanceDefinitions();
+  }
+
+  @Override
+  public ViewController getController() {
+    return viewContext.getController();
+  }
+
+  @Override
+  public HttpImpersonator getHttpImpersonator() {
+    return viewContext.getHttpImpersonator();
+  }
+
+  @Override
+  public ImpersonatorSetting getImpersonatorSetting() {
+    return viewContext.getImpersonatorSetting();
+  }
+
+  @Override
+  public Cluster getCluster() {
+    return viewContext.getCluster();
+  }
+}
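
Since the wrapper exists to decouple user lookup from request-scoped ThreadLocals, a typical usage would be to wrap the context on the request thread and only then hand it to actor code. This is a hypothetical sketch; the viewContext variable stands for the request-scoped context injected by the framework.

    // Hypothetical usage sketch: wrap on the request thread, read on an actor thread.
    ViewContext safe = new SafeViewContext(viewContext); // username/loggedinUser captured here
    // ... later, inside an actor running on a different thread:
    String user = safe.getUsername();      // returns the captured value, no ThreadLocal lookup
    String admin = safe.getLoggedinUser(); // likewise safe off the request thread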

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/ColumnInfo.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/ColumnInfo.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/ColumnInfo.java
new file mode 100644
index 0000000..2876348
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/ColumnInfo.java
@@ -0,0 +1,117 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal.dto;
+
+import org.apache.commons.lang3.builder.EqualsBuilder;
+
+/**
+ * DTO object to store the column information of a table
+ */
+public class ColumnInfo {
+  private String name;
+  // TODO : to be broken into datatype + precision + scale for better comparison
+  private String type;
+  private Integer precision;
+  private Integer scale;
+  private String comment;
+
+  public ColumnInfo(String name, String type, Integer precision, Integer scale, String comment) {
+    this.name = name;
+    this.type = type;
+    this.precision = precision;
+    this.scale = scale;
+    this.comment = comment;
+  }
+
+  public ColumnInfo(String name, String type, String comment) {
+    this.name = name;
+    this.type = type;
+    this.comment = comment;
+  }
+
+  public ColumnInfo(String name, String type) {
+    this.name = name;
+    this.type = type;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public String getType() {
+    return type;
+  }
+
+  public Integer getPrecision() {
+    return precision;
+  }
+
+  public void setPrecision(Integer precision) {
+    this.precision = precision;
+  }
+
+  public Integer getScale() {
+    return scale;
+  }
+
+  public void setScale(Integer scale) {
+    this.scale = scale;
+  }
+
+  public String getComment() {
+    return comment;
+  }
+
+  @Override
+  public int hashCode() {
+    // Kept consistent with equals(), which compares only name, type and comment
+    // (precision and scale are intentionally excluded).
+    int result = name.hashCode();
+    result = 31 * result + type.hashCode();
+    result = 31 * result + (comment != null ? comment.hashCode() : 0);
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+
+    if (o == null || getClass() != o.getClass()) return false;
+
+    ColumnInfo that = (ColumnInfo) o;
+
+    return new EqualsBuilder()
+        .append(getName(), that.getName())
+        .append(getType(), that.getType())
+        .append(getComment(), that.getComment())
+        .isEquals();
+  }
+
+  @Override
+  public String toString() {
+    final StringBuilder sb = new StringBuilder("ColumnInfo{");
+    sb.append("name='").append(name).append('\'');
+    sb.append(", type='").append(type).append('\'');
+    sb.append(", precision=").append(precision);
+    sb.append(", scale=").append(scale);
+    sb.append(", comment='").append(comment).append('\'');
+    sb.append('}');
+    return sb.toString();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/ColumnOrder.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/ColumnOrder.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/ColumnOrder.java
new file mode 100644
index 0000000..23757dc
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/ColumnOrder.java
@@ -0,0 +1,54 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.ambari.view.hive20.internal.dto;
+
+public class ColumnOrder {
+  private String columnName;
+  private Order order;
+
+  public ColumnOrder(String columnName, Order order) {
+    this.columnName = columnName;
+    this.order = order;
+  }
+
+  public String getColumnName() {
+    return columnName;
+  }
+
+  public void setColumnName(String columnName) {
+    this.columnName = columnName;
+  }
+
+  public Order getOrder() {
+    return order;
+  }
+
+  public void setOrder(Order order) {
+    this.order = order;
+  }
+
+  @Override
+  public String toString() {
+    final StringBuilder sb = new StringBuilder("ColumnOrder{");
+    sb.append("columnName='").append(columnName).append('\'');
+    sb.append(", order=").append(order);
+    sb.append('}');
+    return sb.toString();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/DatabaseInfo.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/DatabaseInfo.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/DatabaseInfo.java
new file mode 100644
index 0000000..d8ea207
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/DatabaseInfo.java
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal.dto;
+
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * DTO object to store the Database info
+ */
+public class DatabaseInfo {
+  private String name;
+  private Set<TableInfo> tables = new HashSet<>();
+
+  public DatabaseInfo(String name) {
+    this.name = name;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  public Set<TableInfo> getTables() {
+    return tables;
+  }
+
+  public void setTables(Set<TableInfo> tables) {
+    this.tables = tables;
+  }
+
+  public void addTable(TableInfo tableInfo) {
+    this.tables.add(tableInfo);
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+
+    if (o == null || getClass() != o.getClass()) return false;
+
+    DatabaseInfo info = (DatabaseInfo) o;
+
+    return new EqualsBuilder()
+        .append(getName(), info.getName())
+        .isEquals();
+  }
+
+  @Override
+  public int hashCode() {
+    return new HashCodeBuilder(17, 37)
+        .append(getName())
+        .toHashCode();
+  }
+
+  @Override
+  public String toString() {
+    return "DatabaseInfo{" +
+        "name='" + name + '\'' +
+        ", tables=" + tables +
+        '}';
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/DatabaseResponse.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/DatabaseResponse.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/DatabaseResponse.java
new file mode 100644
index 0000000..3eb98f1
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/DatabaseResponse.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal.dto;
+
+import com.fasterxml.jackson.annotation.JsonInclude;
+
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * DTO for the database response sent to the view UI
+ */
+@JsonInclude(JsonInclude.Include.NON_NULL)
+public class DatabaseResponse {
+  private String id;
+  private String name;
+  private Set<TableResponse> tables;
+
+  public String getId() {
+    return id;
+  }
+
+  public void setId(String id) {
+    this.id = id;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  public Set<TableResponse> getTables() {
+    return tables;
+  }
+
+  public void addTable(TableResponse table) {
+    if(tables == null) {
+      tables = new HashSet<>();
+    }
+    tables.add(table);
+  }
+
+  public void addAllTables(Collection<TableResponse> tableResponses) {
+    if(tables == null) {
+      tables = new HashSet<>();
+    }
+    tables.addAll(tableResponses);
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/DetailedTableInfo.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/DetailedTableInfo.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/DetailedTableInfo.java
new file mode 100644
index 0000000..84a562a
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/DetailedTableInfo.java
@@ -0,0 +1,124 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal.dto;
+
+import java.util.Map;
+
+/**
+ * DTO object to store the detailed information of a table
+ */
+public class DetailedTableInfo {
+  private String tableName;
+  private String dbName;
+  private String owner;
+  private String createTime;
+  private String lastAccessTime;
+  private String retention;
+  private String tableType;
+  private String location;
+  private Map<String, String> parameters;
+
+
+  public String getTableName() {
+    return tableName;
+  }
+
+  public void setTableName(String tableName) {
+    this.tableName = tableName;
+  }
+
+  public String getDbName() {
+    return dbName;
+  }
+
+  public void setDbName(String dbName) {
+    this.dbName = dbName;
+  }
+
+  public String getOwner() {
+    return owner;
+  }
+
+  public void setOwner(String owner) {
+    this.owner = owner;
+  }
+
+  public String getCreateTime() {
+    return createTime;
+  }
+
+  public void setCreateTime(String createTime) {
+    this.createTime = createTime;
+  }
+
+  public String getLastAccessTime() {
+    return lastAccessTime;
+  }
+
+  public void setLastAccessTime(String lastAccessTime) {
+    this.lastAccessTime = lastAccessTime;
+  }
+
+  public String getRetention() {
+    return retention;
+  }
+
+  public void setRetention(String retention) {
+    this.retention = retention;
+  }
+
+  public String getTableType() {
+    return tableType;
+  }
+
+  public void setTableType(String tableType) {
+    this.tableType = tableType;
+  }
+
+  public Map<String, String> getParameters() {
+    return parameters;
+  }
+
+  public void setParameters(Map<String, String> parameters) {
+    this.parameters = parameters;
+  }
+
+  public String getLocation() {
+    return location;
+  }
+
+  public void setLocation(String location) {
+    this.location = location;
+  }
+
+  @Override
+  public String toString() {
+    return "DetailedTableInfo{" +
+        "tableName='" + tableName + '\'' +
+        ", dbName='" + dbName + '\'' +
+        ", owner='" + owner + '\'' +
+        ", createTime='" + createTime + '\'' +
+        ", lastAccessTime='" + lastAccessTime + '\'' +
+        ", retention='" + retention + '\'' +
+        ", tableType='" + tableType + '\'' +
+        ", location='" + location + '\'' +
+        ", parameters=" + parameters +
+        '}';
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/Order.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/Order.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/Order.java
new file mode 100644
index 0000000..42b7339
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/Order.java
@@ -0,0 +1,37 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.ambari.view.hive20.internal.dto;
+
+public enum Order {
+  DESC (0),
+  ASC (1);
+
+  private static final Order[] allValues = values();
+
+  public static Order fromOrdinal(int n) {
+    return allValues[n];
+  }
+
+  private final int ordinal;
+
+  Order(int ordinal) {
+    this.ordinal = ordinal;
+  }
+
+  public int getOrdinal() {
+    return ordinal;
+  }
+}
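
A one-line example of the ordinal round trip, assuming the numeric value comes from metadata that encodes descending as 0 and ascending as 1 (as the enum above does):

    // Order.fromOrdinal(0) == Order.DESC and Order.fromOrdinal(1) == Order.ASC,
    // so a stored sort-order value can be mapped back without a switch statement.
    Order order = Order.fromOrdinal(1); // ASC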

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/PartitionInfo.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/PartitionInfo.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/PartitionInfo.java
new file mode 100644
index 0000000..d9a428a
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/PartitionInfo.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal.dto;
+
+import java.util.List;
+
+/**
+ * DTO object to store the partition columns of a table
+ */
+public class PartitionInfo {
+  private final List<ColumnInfo> columns;
+
+
+  public PartitionInfo(List<ColumnInfo> columns) {
+    this.columns = columns;
+  }
+
+  public List<ColumnInfo> getColumns() {
+    return columns;
+  }
+
+  @Override
+  public String toString() {
+    return "PartitionInfo{" +
+        "columns=" + columns +
+        '}';
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/Section.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/Section.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/Section.java
new file mode 100644
index 0000000..49f2a49
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/Section.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal.dto;
+
+/**
+ * Base class for a section of output identified by a section marker and optional start/end markers
+ */
+public abstract class Section {
+  private String sectionMarker;
+  private String sectionStartMarker;
+  private String sectionEndMarker;
+
+  public Section(String sectionMarker, String sectionStartMarker, String sectionEndMarker) {
+    this.sectionMarker = sectionMarker;
+    this.sectionStartMarker = sectionStartMarker;
+    this.sectionEndMarker = sectionEndMarker;
+  }
+
+  public String getSectionMarker() {
+    return sectionMarker;
+  }
+
+  public String getSectionStartMarker() {
+    return sectionStartMarker;
+  }
+
+  public String getSectionEndMarker() {
+    return sectionEndMarker;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/StorageInfo.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/StorageInfo.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/StorageInfo.java
new file mode 100644
index 0000000..0bd4b41
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/StorageInfo.java
@@ -0,0 +1,124 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal.dto;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * DTO object to store the storage information of a table
+ */
+public class StorageInfo {
+  private String serdeLibrary;
+  private String inputFormat;
+  private String outputFormat;
+  private String compressed;
+  private String numBuckets;
+  private List<String> bucketCols;
+  private List<ColumnOrder> sortCols;
+  private String fileFormat;
+  private Map<String, String> parameters;
+
+  public String getFileFormat() {
+    return fileFormat;
+  }
+
+  public void setFileFormat(String fileFormat) {
+    this.fileFormat = fileFormat;
+  }
+
+  public String getSerdeLibrary() {
+    return serdeLibrary;
+  }
+
+  public void setSerdeLibrary(String serdeLibrary) {
+    this.serdeLibrary = serdeLibrary;
+  }
+
+  public String getInputFormat() {
+    return inputFormat;
+  }
+
+  public void setInputFormat(String inputFormat) {
+    this.inputFormat = inputFormat;
+  }
+
+  public String getOutputFormat() {
+    return outputFormat;
+  }
+
+  public void setOutputFormat(String outputFormat) {
+    this.outputFormat = outputFormat;
+  }
+
+  public String getCompressed() {
+    return compressed;
+  }
+
+  public void setCompressed(String compressed) {
+    this.compressed = compressed;
+  }
+
+  public String getNumBuckets() {
+    return numBuckets;
+  }
+
+  public void setNumBuckets(String numBuckets) {
+    this.numBuckets = numBuckets;
+  }
+
+  public List<String> getBucketCols() {
+    return bucketCols;
+  }
+
+  public void setBucketCols(List<String> bucketCols) {
+    this.bucketCols = bucketCols;
+  }
+
+  public List<ColumnOrder> getSortCols() {
+    return sortCols;
+  }
+
+  public void setSortCols(List<ColumnOrder> sortCols) {
+    this.sortCols = sortCols;
+  }
+
+  public Map<String, String> getParameters() {
+    return parameters;
+  }
+
+  public void setParameters(Map<String, String> parameters) {
+    this.parameters = parameters;
+  }
+
+  @Override
+  public String toString() {
+    return "StorageInfo{" +
+        "serdeLibrary='" + serdeLibrary + '\'' +
+        ", inputFormat='" + inputFormat + '\'' +
+        ", outputFormat='" + outputFormat + '\'' +
+        ", compressed='" + compressed + '\'' +
+        ", numBuckets='" + numBuckets + '\'' +
+        ", bucketCols='" + bucketCols + '\'' +
+        ", sortCols='" + sortCols + '\'' +
+        ", fileFormat='" + fileFormat + '\'' +
+        ", parameters=" + parameters +
+        '}';
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableInfo.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableInfo.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableInfo.java
new file mode 100644
index 0000000..41be0a0
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableInfo.java
@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal.dto;
+
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+
+/**
+ * DTO object to store the Table information
+ */
+public class TableInfo {
+  private String name;
+  private String type;
+
+  public TableInfo(String name, String type) {
+    this.name = name;
+    this.type = type;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  public String getType() {
+    return type;
+  }
+
+  public void setType(String type) {
+    this.type = type;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+
+    if (o == null || getClass() != o.getClass()) return false;
+
+    TableInfo info = (TableInfo) o;
+
+    return new EqualsBuilder()
+        .append(getName(), info.getName())
+        .isEquals();
+  }
+
+  @Override
+  public int hashCode() {
+    return new HashCodeBuilder(17, 37)
+        .append(getName())
+        .toHashCode();
+  }
+
+  @Override
+  public String toString() {
+    return "TableInfo{" +
+        "name='" + name + '\'' +
+        ", type='" + type + '\'' +
+        '}';
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableMeta.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableMeta.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableMeta.java
new file mode 100644
index 0000000..f47e76c
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableMeta.java
@@ -0,0 +1,125 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal.dto;
+
+import java.io.Serializable;
+import java.util.List;
+
+/**
+ * DTO object to store the complete metadata of a table
+ */
+public class TableMeta implements Serializable {
+  private String id;
+  private String database;
+  private String table;
+  private List<ColumnInfo> columns;
+  private String ddl;
+  private PartitionInfo partitionInfo;
+  private DetailedTableInfo detailedInfo;
+  private StorageInfo storageInfo;
+  private ViewInfo viewInfo;
+
+  public String getId() {
+    return id;
+  }
+
+  public void setId(String id) {
+    this.id = id;
+  }
+
+  public String getDatabase() {
+    return database;
+  }
+
+  public void setDatabase(String database) {
+    this.database = database;
+  }
+
+  public String getTable() {
+    return table;
+  }
+
+  public void setTable(String table) {
+    this.table = table;
+  }
+
+  public List<ColumnInfo> getColumns() {
+    return columns;
+  }
+
+  public void setColumns(List<ColumnInfo> columns) {
+    this.columns = columns;
+  }
+
+  public String getDdl() {
+    return ddl;
+  }
+
+  public void setDdl(String ddl) {
+    this.ddl = ddl;
+  }
+
+  public PartitionInfo getPartitionInfo() {
+    return partitionInfo;
+  }
+
+  public void setPartitionInfo(PartitionInfo partitionInfo) {
+    this.partitionInfo = partitionInfo;
+  }
+
+  public DetailedTableInfo getDetailedInfo() {
+    return detailedInfo;
+  }
+
+  public void setDetailedInfo(DetailedTableInfo detailedInfo) {
+    this.detailedInfo = detailedInfo;
+  }
+
+  public StorageInfo getStorageInfo() {
+    return storageInfo;
+  }
+
+  public void setStorageInfo(StorageInfo storageInfo) {
+    this.storageInfo = storageInfo;
+  }
+
+  public ViewInfo getViewInfo() {
+    return viewInfo;
+  }
+
+  public void setViewInfo(ViewInfo viewInfo) {
+    this.viewInfo = viewInfo;
+  }
+
+  @Override
+  public String toString() {
+    final StringBuilder sb = new StringBuilder("TableMeta{");
+    sb.append("id='").append(id).append('\'');
+    sb.append(", database='").append(database).append('\'');
+    sb.append(", table='").append(table).append('\'');
+    sb.append(", columns=").append(columns);
+    sb.append(", ddl='").append(ddl).append('\'');
+    sb.append(", partitionInfo=").append(partitionInfo);
+    sb.append(", detailedInfo=").append(detailedInfo);
+    sb.append(", storageInfo=").append(storageInfo);
+    sb.append(", viewInfo=").append(viewInfo);
+    sb.append('}');
+    return sb.toString();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableResponse.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableResponse.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableResponse.java
new file mode 100644
index 0000000..09e1ea9
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableResponse.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal.dto;
+
+/**
+ * DTO for the table response sent to the view UI
+ */
+public class TableResponse {
+  private String id;
+  private String name;
+  private String type;
+  private String databaseId;
+
+  public String getId() {
+    return id;
+  }
+
+  public void setId(String id) {
+    this.id = id;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  public String getType() {
+    return type;
+  }
+
+  public void setType(String type) {
+    this.type = type;
+  }
+
+  public String getDatabaseId() {
+    return databaseId;
+  }
+
+  public void setDatabaseId(String databaseId) {
+    this.databaseId = databaseId;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/ViewInfo.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/ViewInfo.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/ViewInfo.java
new file mode 100644
index 0000000..2e23014
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/ViewInfo.java
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal.dto;
+
+/**
+ * DTO object to store the original and extended text of a Hive view
+ */
+public class ViewInfo {
+  private String originalText;
+  private String extendedText;
+
+
+  public String getOriginalText() {
+    return originalText;
+  }
+
+  public void setOriginalText(String originalText) {
+    this.originalText = originalText;
+  }
+
+  public String getExtendedText() {
+    return extendedText;
+  }
+
+  public void setExtendedText(String extendedText) {
+    this.extendedText = extendedText;
+  }
+
+  @Override
+  public String toString() {
+    return "ViewInfo{" +
+        "originalText='" + originalText + '\'' +
+        ", extendedText='" + extendedText + '\'' +
+        '}';
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/AbstractTableMetaParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/AbstractTableMetaParser.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/AbstractTableMetaParser.java
new file mode 100644
index 0000000..3c7ce99
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/AbstractTableMetaParser.java
@@ -0,0 +1,177 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal.parsers;
+
+
+import org.apache.ambari.view.hive20.client.Row;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Base class for parsers that extract one section from the output of 'describe formatted ${tableName}'
+ */
+public abstract class AbstractTableMetaParser<T> implements TableMetaSectionParser<T> {
+  private final String sectionMarker;
+  private final String secondarySectionMarker;
+  private final String sectionStartMarker;
+  private final String sectionEndMarker;
+
+
+  public AbstractTableMetaParser(String sectionMarker, String sectionStartMarker, String sectionEndMarker) {
+    this(sectionMarker, null, sectionStartMarker, sectionEndMarker);
+  }
+
+  public AbstractTableMetaParser(String sectionMarker, String secondarySectionMarker, String sectionStartMarker, String sectionEndMarker) {
+    this.sectionMarker = sectionMarker;
+    this.secondarySectionMarker = secondarySectionMarker;
+    this.sectionStartMarker = sectionStartMarker;
+    this.sectionEndMarker = sectionEndMarker;
+  }
+
+  protected Map<String, Object> parseSection(List<Row> rows) {
+    boolean sectionStarted = false;
+    boolean startMarkerAndEndMarkerIsSame = !(sectionStartMarker == null || sectionEndMarker == null) && sectionStartMarker.equalsIgnoreCase(sectionEndMarker);
+    boolean sectionDataReached = false;
+
+    Map<String, Object> result = new LinkedHashMap<>();
+
+    Iterator<Row> iterator = rows.iterator();
+
+    String currentNestedEntryParent = null;
+    List<Entry> currentNestedEntries = null;
+    boolean processingNestedEntry = false;
+
+    while (iterator.hasNext()) {
+      Row row = iterator.next();
+      String colName = ((String) row.getRow()[0]).trim();
+      String colValue = row.getRow()[1] != null ? ((String) row.getRow()[1]).trim() : null;
+      String colComment = row.getRow()[2] != null ? ((String) row.getRow()[2]).trim() : null;
+
+      if (sectionMarker.equalsIgnoreCase(colName)) {
+        sectionStarted = true;
+      } else {
+        if (sectionStarted) {
+          if (secondarySectionMarker != null && secondarySectionMarker.equalsIgnoreCase(colName) && colValue != null) {
+            continue;
+          }
+
+          if (sectionStartMarker != null && sectionStartMarker.equalsIgnoreCase(colName) && colValue == null) {
+            if (startMarkerAndEndMarkerIsSame) {
+              if (sectionDataReached) {
+                break;
+              }
+            }
+            sectionDataReached = true;
+            continue;
+          } else if (sectionEndMarker != null && sectionEndMarker.equalsIgnoreCase(colName) && colValue == null) {
+            break;
+          } else if (sectionStartMarker == null) {
+            sectionDataReached = true;
+            //continue;
+          }
+
+          if (colValue == null && !processingNestedEntry) {
+            currentNestedEntryParent = colName;
+            currentNestedEntries = new ArrayList<>();
+            processingNestedEntry = true;
+            continue;
+          } else if (colName.equalsIgnoreCase("") && processingNestedEntry) {
+            Entry entry = new Entry(colValue, colComment);
+            currentNestedEntries.add(entry);
+            continue;
+          } else if (processingNestedEntry) {
+            result.put(currentNestedEntryParent, currentNestedEntries);
+            processingNestedEntry = false;
+          }
+
+          Entry entry = new Entry(colName, colValue, colComment);
+          result.put(colName, entry);
+
+        }
+
+      }
+    }
+
+    if (processingNestedEntry) {
+      result.put(currentNestedEntryParent, currentNestedEntries);
+    }
+
+    return result;
+  }
+
+  protected Map<String, String> getMap(Map<String, Object> parsedSection, String key) {
+    Map<String, String> result = new HashMap<>();
+    Object value = parsedSection.get(key);
+    if(value == null) {
+      return null;
+    }
+    if (value instanceof List) {
+      List<Entry> entries = (List<Entry>)value;
+      for(Entry entry: entries) {
+        result.put(entry.getName(), entry.getValue());
+      }
+    }
+    return result;
+  }
+
+  protected String getString(Map<String, Object> parsedSection, String key) {
+    Object value = parsedSection.get(key);
+    if(value == null) {
+      return null;
+    }
+    if (value instanceof Entry) {
+      return ((Entry) parsedSection.get(key)).getValue();
+    }
+    return null;
+  }
+
+
+  public static class Entry {
+    private final String name;
+    private final String value;
+    private final String comment;
+
+    public Entry(String name, String value, String comment) {
+      this.name = name;
+      this.value = value;
+      this.comment = comment;
+    }
+
+    public Entry(String name, String value) {
+      this(name, value, null);
+    }
+
+    public String getName() {
+      return name;
+    }
+
+    public String getValue() {
+      return value;
+    }
+
+    public String getComment() {
+      return comment;
+    }
+  }
+}
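
To make parseSection() easier to follow, here is a hedged walk-through of how a few 'describe formatted' rows would be grouped. The concrete section marker used below ('# Detailed Table Information') is an assumption; the real markers are supplied by the subclasses.

    // Input rows are (colName, colValue, colComment) triples from 'describe formatted':
    //   "# Detailed Table Information"  null       null  -> matches sectionMarker, parsing starts
    //   "Owner:"                        "admin"    null  -> simple Entry("Owner:", "admin")
    //   "Table Parameters:"             null       null  -> becomes the parent of a nested entry list
    //   ""                              "numFiles" "1"   -> nested Entry("numFiles", "1") under "Table Parameters:"
    // parseSection(rows) then returns a LinkedHashMap with an Entry for "Owner:" and a
    // List<Entry> for "Table Parameters:", which getString() and getMap() unwrap respectively.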

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/ColumnInfoParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/ColumnInfoParser.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/ColumnInfoParser.java
new file mode 100644
index 0000000..893854f
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/ColumnInfoParser.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal.parsers;
+
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;
+import org.apache.parquet.Strings;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Parses the columns from the output of 'describe formatted ${tableName}'
+ */
+public class ColumnInfoParser extends AbstractTableMetaParser<List<ColumnInfo>> {
+  private static final Logger LOG = LoggerFactory.getLogger(ColumnInfoParser.class);
+
+  public ColumnInfoParser() {
+    super("# col_name", "", "");
+  }
+
+  @Override
+  public List<ColumnInfo> parse(List<Row> rows) {
+    List<ColumnInfo> columns = new ArrayList<>();
+    /* General Format: Starts from the first index itself
+     | # col_name                    | data_type                                                                     | comment                      |
+     |                               | NULL                                                                          | NULL                         |
+     | viewtime                      | int                                                                           |                              |
+     | userid                        | bigint                                                                        |                              |
+     | page_url                      | string                                                                        |                              |
+     | referrer_url                  | string                                                                        |                              |
+     | ip                            | string                                                                        | IP Address of the User       |
+     |                               | NULL                                                                          | NULL                         |
+     */
+
+    /*Iterator<Row> iterator = rows.iterator();
+    int index = 0;
+    // Skip first two rows
+    while (index < 2) {
+      iterator.next();
+      index++;
+    }
+
+    while (true) {
+      Row row = iterator.next();
+      // Columns section ends with a empty column name value
+      if (index >= rows.size() || "".equalsIgnoreCase((String) row.getRow()[0]))
+        break;
+
+      String colName = (String)row.getRow()[0];
+      String colType = (String)row.getRow()[1];
+      String colComment = (String)row.getRow()[2];
+
+      columns.add(new ColumnInfo(colName, colType, colComment));
+      index++;
+    }*/
+
+
+    Map<String, Object> parsedSection = parseSection(rows);
+    for(Object obj: parsedSection.values()) {
+      if(obj instanceof Entry) {
+        Entry entry = (Entry)obj;
+        String typeInfo = entry.getValue();
+        // parse precision and scale
+        List<String> typePrecisionScale = ParserUtils.parseColumnDataType(typeInfo);
+        String datatype = typePrecisionScale.get(0);
+        String precisionString = typePrecisionScale.get(1);
+        String scaleString = typePrecisionScale.get(2);
+        Integer precision = !Strings.isNullOrEmpty(precisionString) ? Integer.valueOf(precisionString.trim()): null;
+        Integer scale = !Strings.isNullOrEmpty(scaleString) ? Integer.valueOf(scaleString.trim()): null;
+        ColumnInfo columnInfo = new ColumnInfo(entry.getName(), datatype, precision, scale, entry.getComment());
+        columns.add(columnInfo);
+        LOG.debug("found column definition : {}", columnInfo);
+      }
+    }
+    return columns;
+  }
+}
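
For reference, a hedged example of the resulting objects, assuming ParserUtils.parseColumnDataType("decimal(10,2)") splits the type into ["decimal", "10", "2"] (that helper is not shown in this hunk):

    // | price | decimal(10,2) | item price |             -> new ColumnInfo("price", "decimal", 10, 2, "item price")
    // | ip    | string        | IP Address of the User | -> new ColumnInfo("ip", "string", null, null, "IP Address of the User")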

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/CreateTableStatementParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/CreateTableStatementParser.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/CreateTableStatementParser.java
new file mode 100644
index 0000000..89c6ae1
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/CreateTableStatementParser.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal.parsers;
+
+import org.apache.ambari.view.hive20.client.Row;
+
+import java.util.List;
+
+/**
+ * Parses the rows and returns the create table statement
+ */
+public class CreateTableStatementParser implements TableMetaSectionParser<String> {
+  @Override
+  public String parse(List<Row> rows) {
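+    // Each row is expected to hold one line of the CREATE TABLE statement in its first cell
+    // (the typical shape of a SHOW CREATE TABLE result); the lines are joined with newlines.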
+    StringBuilder builder = new StringBuilder();
+    for(Row row: rows) {
+      builder.append(row.getRow()[0]);
+      builder.append("\n");
+    }
+    return builder.toString();
+  }
+}


[08/20] ambari git commit: AMBARI-19321 : Hive View 2.0 - Minimal view for Hive which includes new UI changes. Also made changes in poms as required (nitirajrathore)

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/components/alert-message-display.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/alert-message-display.js b/contrib/views/hive20/src/main/resources/ui/app/components/alert-message-display.js
new file mode 100644
index 0000000..840dbf3
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/alert-message-display.js
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import {shortenText} from '../helpers/shorten-text';
+
+export default Ember.Component.extend({
+  shorten: false,
+  length: 100,
+  expanded: false,
+  shortenedValue: Ember.computed('value', 'shorten', 'expanded', function() {
+    if (this.get('expanded')) {
+      return this.get('value');
+    }
+    if (this.get('shorten')) {
+      let length = this.get('length');
+      let shortenedText = shortenText([this.get('value'), length]);
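+      // Side effect: clear the 'shorten' flag when the text did not actually need shortening,
+      // so no expand/collapse toggle is offered for it.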
+      this.set('shorten', shortenedText !== this.get('value'));
+      return shortenedText;
+    } else {
+      return this.get('value');
+    }
+  }),
+
+  actions: {
+    toggleExpanded() {
+      this.toggleProperty('expanded');
+    }
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/components/alert-message.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/alert-message.js b/contrib/views/hive20/src/main/resources/ui/app/components/alert-message.js
new file mode 100644
index 0000000..ef9179a
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/alert-message.js
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+  classNames: ['flash-messages'],
+
+  actions: {
+    closeAlert() {
+      const flash = this.get('flash');
+      flash.destroyMessage();
+    }
+  }
+
+
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/components/column-item.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/column-item.js b/contrib/views/hive20/src/main/resources/ui/app/components/column-item.js
new file mode 100644
index 0000000..d4e43f3
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/column-item.js
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import datatypes from '../configs/datatypes';
+
+export default Ember.Component.extend({
+  tagName: 'tr',
+  advancedOption: false,
+  datatypes: Ember.copy(datatypes),
+
+
+
+  hasPrecision: Ember.computed.oneWay('column.type.hasPrecision'),
+  hasScale: Ember.computed.oneWay('column.type.hasScale'),
+
+  columnMetaType: null,
+
+
+  didInsertElement() {
+    Ember.run.later( () => {
+      this.$('input').focus();
+    });
+  },
+  didReceiveAttrs() {
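+    // Derive a single display meta type from the column flags (partitioned/clustered).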
+    if(this.get('column.isPartitioned')) {
+      this.set('columnMetaType', 'partitioned');
+    } else if(this.get('column.isClustered')) {
+      this.set('columnMetaType', 'clustered');
+    } else {
+      this.set('columnMetaType');
+    }
+  },
+
+  actions: {
+    typeSelectionMade(datatype) {
+      this.set('column.type', datatype);
+    },
+
+    advanceOptionToggle() {
+      this.toggleProperty('advancedOption');
+    },
+
+    edit() {
+      this.set('column.editing', true);
+      Ember.run.later(() => {
+        this.$('input').focus();
+      });
+    },
+
+    delete() {
+      console.log('deleting column');
+      this.sendAction('columnDeleted', this.get('column'));
+    }
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/components/create-table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/create-table.js b/contrib/views/hive20/src/main/resources/ui/app/components/create-table.js
new file mode 100644
index 0000000..f31d37f
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/create-table.js
@@ -0,0 +1,160 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import Helper from '../configs/helpers';
+
+export default Ember.Component.extend({
+  init() {
+    this._super(...arguments);
+    this.set('columns', Ember.A());
+    this.set('properties', []);
+    this.set('settings', {});
+    this.set('shouldAddBuckets', null);
+    this.set('settingErrors', []);
+  },
+
+  didReceiveAttrs() {
+    this.get('tabs').setEach('active', false);
+    let firstTab = this.get('tabs.firstObject');
+    firstTab.set('active', true);
+  },
+
+  actions: {
+    activate(link) {
+      console.log("Activate: ", link);
+    },
+
+    create() {
+      if (this.validate()) {
+        this.sendAction('create', {
+          name: this.get('tableName'),
+          columns: this.get('columns'),
+          settings: this.get('settings'),
+          properties: this.get('properties')
+        });
+      }
+    },
+
+    cancel() {
+      this.sendAction('cancel');
+    }
+  },
+
+  validate() {
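+    // Validate each section in order; column, bucket and table-property failures switch the
+    // user to the corresponding tab so the errors are visible.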
+    if (!this.validateTableName()) {
+      return false;
+    }
+    if (!(this.checkColumnsExists() &&
+      this.checkColumnUniqueness() &&
+      this.validateColumns())) {
+      this.selectTab("create.table.columns");
+      return false;
+    }
+
+    if(!(this.validateNumBuckets())) {
+      this.selectTab("create.table.advanced");
+      return false;
+    }
+
+    if (!(this.validateTableProperties())) {
+      this.selectTab("create.table.properties");
+      return false;
+    }
+    return true;
+  },
+  validateTableName() {
+    this.set('hasTableNameError');
+    this.set('tableNameErrorText');
+
+    if (Ember.isEmpty(this.get('tableName'))) {
+      this.set('hasTableNameError', true);
+      this.set('tableNameErrorText', 'Name cannot be empty');
+      return false;
+    }
+
+    return true;
+  },
+
+  checkColumnsExists() {
+    this.set('hasEmptyColumnsError');
+    this.set('emptyColumnsErrorText');
+    if (this.get('columns.length') === 0) {
+      this.set('hasEmptyColumnsError', true);
+      this.set('emptyColumnsErrorText', 'No columns configured. Add some column definitions.');
+      return false;
+    }
+    return true;
+  },
+
+  checkColumnUniqueness() {
+    let columnNames = [];
+    for (let i = 0; i < this.get('columns.length'); i++) {
+      let column = this.get('columns').objectAt(i);
+      column.clearError();
+      if (columnNames.indexOf(column.get('name')) === -1) {
+        columnNames.pushObject(column.get('name'));
+      } else {
+        column.get('errors').push({type: 'name', error: 'Name should be unique'});
+        return false;
+      }
+    }
+
+    return true;
+  },
+
+  validateColumns() {
+    for (let i = 0; i < this.get('columns.length'); i++) {
+      let column = this.get('columns').objectAt(i);
+      if (!column.validate()) {
+        return false;
+      }
+    }
+    return true;
+  },
+
+  validateTableProperties() {
+    for (let i = 0; i < this.get('properties.length'); i++) {
+      let property = this.get('properties').objectAt(i);
+      if (!property.validate()) {
+        return false;
+      }
+    }
+    return true;
+  },
+
+  validateNumBuckets() {
+    let clusteredColumns = this.get('columns').filterBy('isClustered', true);
+    if(clusteredColumns.get('length') > 0 &&
+      (Ember.isEmpty(this.get('settings.numBuckets')) ||
+      !Helper.isInteger(this.get('settings.numBuckets')))) {
+      this.get('settingErrors').pushObject({type: 'numBuckets', error: "Some columns are clustered, so the number of buckets is required."});
+      return false;
+    }
+
+    return true;
+  },
+
+  selectTab(link) {
+    this.get('tabs').setEach('active', false);
+    let selectedTab = this.get('tabs').findBy('link', link);
+    if (!Ember.isEmpty(selectedTab)) {
+      selectedTab.set('active', true);
+    }
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/components/database-drop-confirm.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/database-drop-confirm.js b/contrib/views/hive20/src/main/resources/ui/app/components/database-drop-confirm.js
new file mode 100644
index 0000000..2ea1718
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/database-drop-confirm.js
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+  name: '',
+  actions: {
+    confirm() {
+      this.sendAction('yes');
+    },
+
+    cancel() {
+      this.sendAction('no');
+    }
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/components/database-not-empty.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/database-not-empty.js b/contrib/views/hive20/src/main/resources/ui/app/components/database-not-empty.js
new file mode 100644
index 0000000..d3cdfc3
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/database-not-empty.js
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+  name: '',
+
+  actions: {
+    close() {
+      this.sendAction('close');
+    }
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/components/database-search-bar.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/database-search-bar.js b/contrib/views/hive20/src/main/resources/ui/app/components/database-search-bar.js
new file mode 100644
index 0000000..67828c1
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/database-search-bar.js
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+  classNames: ['database-search', 'clearfix'],
+  databases: [],
+
+  heading: 'database',
+  subHeading: 'Select or search database/schema',
+  enableSecondaryAction: true,
+  secondaryActionText: 'Browse',
+  secondaryActionFaIcon: 'folder',
+
+  extendDrawer: false,
+  filterText: '',
+
+  selectedDatabase: Ember.computed('databases.@each.selected', function() {
+    return this.get('databases').findBy('selected', true);
+  }),
+
+  filteredDatabases: Ember.computed('filterText', 'databases.@each', function() {
+    return this.get('databases').filter((item) => {
+      return item.get('name').indexOf(this.get('filterText')) !== -1;
+    });
+  }),
+
+  resetDatabaseSelection() {
+    this.get('databases').forEach(x => {
+        if (x.get('selected')) {
+          x.set('selected', false);
+        }
+    });
+  },
+
+  didRender() {
+    this._super(...arguments);
+    this.$('input.display').on('focusin', () => {
+      this.set('extendDrawer', true);
+      Ember.run.later(() => {
+        this.$('input.search').focus();
+      });
+    });
+  },
+
+  actions: {
+    secondaryActionClicked: function() {
+      this.toggleProperty('extendDrawer');
+      Ember.run.later(() => {
+        this.$('input.search').focus();
+      });
+    },
+
+    databaseClicked: function(database) {
+      this.resetDatabaseSelection();
+      database.set('selected', true);
+      this.set('extendDrawer', false);
+      this.set('filterText', '');
+      this.sendAction('selected', database);
+    }
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/components/edit-setting-item.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/edit-setting-item.js b/contrib/views/hive20/src/main/resources/ui/app/components/edit-setting-item.js
new file mode 100644
index 0000000..4bc8787
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/edit-setting-item.js
@@ -0,0 +1,111 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+  tagName: 'tr',
+  selectedValue: '',
+
+  didReceiveAttrs() {
+    this._super(...arguments);
+    let selectedParameter = this.get('hiveParameters').filterBy('name', this.get('setting.key'));
+    if (selectedParameter.get('length') === 1) {
+      this.set('selectedParam', selectedParameter[0]);
+      this.set('selectedValue', this.get('setting.value'));
+    }
+  },
+  setUserSettingsAddOption: function (list, term) {
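+    // Replace any previously injected "Add '...' to list" entry with a fresh one at the head of the list.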
+    let filteredList = list.filter(x => x.get('name').startsWith('Add'));
+    if (filteredList.get('length') > 0) {
+      list.removeObject(filteredList.objectAt(0));
+    }
+
+    list.unshiftObject(Ember.Object.create({name: `Add '${term}' to list`, actualValue: term}));
+    return list;
+  },
+
+  validate() {
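+    // A value is valid when it is non-empty and either the parameter defines no constraints,
+    // the value is one of the allowed values, or it matches the parameter's validation regex.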
+    let value = this.get('selectedValue');
+    let setting = this.get('selectedParam');
+    let error = "";
+    if (Ember.isEmpty(value)) {
+      return {valid: false, error: "Value cannot be empty"};
+    }
+
+    if (Ember.isEmpty(setting.get('values')) && Ember.isEmpty(setting.get('validate'))) {
+      return {valid: true};
+    }
+
+    if (setting.get('values') && setting.get('values').mapBy('value').contains(value)) {
+      return {valid: true};
+    } else if (setting.get('values')) {
+      error = `Value should be in (${setting.get('values').mapBy('value').join(', ')})`;
+    }
+
+    if (setting.get('validate') && setting.get('validate').test(value)) {
+      return {valid: true};
+    } else if (setting.get('validate')) {
+      error = `Value should be matching regex ${setting.get('validate')}`;
+    }
+
+    return {valid: false, error: error};
+  },
+
+  actions: {
+    searchAction(term) {
+      this.set('currentSearchField', term);
+      // Check for partial Matches
+      let filteredList = this.get('hiveParameters').filter(x => x.get('name').toLowerCase().indexOf(term.toLowerCase()) !== -1);
+      //check for exact matches
+      if ((filteredList.get('length') !== 1) || (filteredList[0].get('name') !== term)) {
+        filteredList = this.setUserSettingsAddOption(filteredList, term);
+      }
+      return filteredList;
+    },
+    selectionMade(selection, list) {
+      this.get('hiveParameters').setEach('disabled', false);
+      if (selection.get('name').startsWith('Add')) {
+        let actualValue = selection.get('actualValue');
+        let newParam = Ember.Object.create({name: actualValue, disabled: true});
+        this.set('selectedParam', newParam);
+        this.get('hiveParameters').unshiftObject(newParam);
+      } else {
+        selection.set('disabled', true);
+        this.set('selectedParam', selection);
+      }
+    },
+    cancel() {
+      this.set('setting.editMode', false);
+      this.sendAction('cancelAction', this.get('setting'));
+    },
+    update() {
+      let validationResult = this.validate();
+      if(validationResult.valid) {
+        let selected = this.get('selectedParam');
+        this.set('setting.key', selected.get('name'));
+        this.set('setting.value', this.get('selectedValue') || '');
+        this.sendAction('updateAction', this.get('setting'));
+      } else {
+        this.set('invalid', true);
+        this.set('currentError', validationResult.error);
+      }
+
+    }
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/components/hdfs-viewer-modal.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/hdfs-viewer-modal.js b/contrib/views/hive20/src/main/resources/ui/app/components/hdfs-viewer-modal.js
new file mode 100644
index 0000000..20f3f65
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/hdfs-viewer-modal.js
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import HdfsPickerConfig from '../utils/hdfs-picker-config';
+
+export default Ember.Component.extend({
+  store: Ember.inject.service(),
+  config: null,
+  showSelectedPath: true,
+
+  hdfsLocation: null,
+
+  init() {
+    this._super(...arguments);
+    this.set('config', HdfsPickerConfig.create({store: this.get('store')}));
+  },
+
+  actions: {
+    closeDirectoryViewer() {
+      this.sendAction('close');
+    },
+
+    pathSelected() {
+      this.sendAction('selected', this.get('hdfsLocation'));
+    },
+
+    viewerSelectedPath(data) {
+      this.set('hdfsLocation', data.path);
+    },
+
+    viewerError(err) {
+      console.log("Error", err);
+    }
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/components/job-item.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/job-item.js b/contrib/views/hive20/src/main/resources/ui/app/components/job-item.js
new file mode 100644
index 0000000..3d37df8
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/job-item.js
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+  jobs: Ember.inject.service(),
+  tagName: '',
+  expanded: false,
+  expandedValue: null,
+  actions: {
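+    // Toggles the expanded job row and fetches the job's query text through the jobs service.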
+    toggleExpandJob(jobId) {
+      this.toggleProperty('expanded');
+      this.set('valueLoading', true);
+      this.get('jobs').getQuery(jobId).then((queryFile) => {
+        this.set('queryFile', queryFile);
+        this.set('valueLoading', false);
+      }).catch((err) => {
+        this.set('valueLoading', false);
+      })
+    }
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/components/jobs-browser.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/jobs-browser.js b/contrib/views/hive20/src/main/resources/ui/app/components/jobs-browser.js
new file mode 100644
index 0000000..e3b22e9
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/jobs-browser.js
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+  startTime: null,
+  endTime: null,
+  maxEndTime: null,
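+  // Aggregates the jobs into a map of lower-cased status -> count (e.g. { succeeded: 3, failed: 1 }).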
+  statusCounts: Ember.computed('jobs', function() {
+    return this.get('jobs').reduce((acc, item, index) => {
+      let status = item.get('status').toLowerCase();
+      if(Ember.isEmpty(acc[status])) {
+        acc[status] = 1;
+      } else {
+        acc[status] = acc[status] + 1;
+      }
+
+      return acc;
+    }, {});
+  }),
+
+
+  actions: {
+    setDateRange(startDate, endDate) {
+      this.sendAction('filterChanged', startDate, endDate);
+    },
+
+    expandJob(jobId) {
+      console.log("Job to be expanded", jobId);
+    }
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/components/list-filter.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/list-filter.js b/contrib/views/hive20/src/main/resources/ui/app/components/list-filter.js
new file mode 100644
index 0000000..d538aa3
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/list-filter.js
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+  classNames: ['list-filter'],
+  header: '',
+  subHeader: '',
+  items: [],
+  filterText: '',
+  emptyFilterText: Ember.computed('filterText', function() {
+    return this.get('filterText').length === 0;
+  }),
+  filteredItems: Ember.computed('filterText', 'items.@each', function() {
+    return this.get('items').filter((item) => {
+      return item.get('name').indexOf(this.get('filterText')) !== -1;
+    });
+  }),
+
+  actions: {
+    enableFilter() {
+      this.$('input').focus();
+    },
+
+    disableFilter() {
+      this.set('filterText', '');
+    }
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/components/list-group.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/list-group.js b/contrib/views/hive20/src/main/resources/ui/app/components/list-group.js
new file mode 100644
index 0000000..aeafcde
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/list-group.js
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+  classNames: ['list-group']
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/components/list-item.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/list-item.js b/contrib/views/hive20/src/main/resources/ui/app/components/list-item.js
new file mode 100644
index 0000000..d29495a
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/list-item.js
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+  tagName: 'a',
+  classNames: ['list-group-item', 'am-view-list-item'],
+  classNameBindings: ['selected:active'],
+  selected: Ember.computed.oneWay('item.selected'),
+  click() {
+    this.sendAction('itemClicked', this.get('item'));
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/components/multiple-database-search-bar.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/multiple-database-search-bar.js b/contrib/views/hive20/src/main/resources/ui/app/components/multiple-database-search-bar.js
new file mode 100644
index 0000000..384368f
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/multiple-database-search-bar.js
@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+
+  classNames: ['multiple-database-search', 'clearfix'],
+
+  databases: [],
+
+  //will make use of these in templates
+  heading: 'database',
+  subHeading: 'Select or search database/schema',
+
+  selectedDatabase: Ember.computed('databases.@each.selected', function() {
+    return this.get('databases').findBy('selected', true);
+  }),
+
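+  // Note: despite depending on 'filterText', this currently returns every database that has a
+  // name; no text-based filtering is applied here yet.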
+  filteredDatabases: Ember.computed('filterText', 'databases.@each', function() {
+    return this.get('databases').filter((item) => {
+      return item.get('name');
+    });
+  }),
+
+  resetDatabaseSelection() {
+    this.get('databases').forEach(x => {
+      if (x.get('selected')) {
+        x.set('selected', false);
+      }
+    });
+  },
+
+  allDbs: Ember.computed('selectedDatabase','filteredDatabases', function() {
+    let dblist =[];
+    this.get('filteredDatabases').forEach(db => {
+      dblist.push(db.get('name'));
+    });
+
+    return dblist;
+  }),
+
+  selectedDbs: Ember.computed('selectedDatabase','filteredDatabases', function() {
+    let selecteddblist =[];
+    selecteddblist.push(this.get('selectedDatabase.name')); // For now only a single selection is supported; this will be extended to handle multiple selected databases.
+    return selecteddblist;
+  }),
+
+  actions: {
+    createOnEnter(select, e) {
+      if (e.keyCode === 13 && select.isOpen &&
+        !select.highlighted && !Ember.isBlank(select.searchText)) {
+
+        let selected = this.get('selectedDbs');
+        if (!selected.includes(select.searchText)) {
+          this.get('options').pushObject(select.searchText);
+          select.actions.choose(select.searchText);
+        }
+      }
+    },
+
+    updateTables(){
+      console.log('updateTables for selected databases.', this.get('selectedDbs'));
+      this.sendAction('xyz', this.get('selectedDbs'));
+    }
+
+  }
+
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/components/property-item.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/property-item.js b/contrib/views/hive20/src/main/resources/ui/app/components/property-item.js
new file mode 100644
index 0000000..96ef473
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/property-item.js
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+  tagName: 'tr',
+
+  didInsertElement() {
+    Ember.run.later( () => {
+      this.$('input')[0].focus();
+    });
+  },
+
+
+  actions: {
+    edit() {
+      this.set('property.editing', true);
+      Ember.run.later(() => {
+        this.$('input')[0].focus();
+      });
+    },
+
+    delete() {
+      this.sendAction('propertyItemDeleted', this.get('property'));
+    }
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/components/query-editor.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/query-editor.js b/contrib/views/hive20/src/main/resources/ui/app/components/query-editor.js
new file mode 100644
index 0000000..f08e5a6
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/query-editor.js
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+
+  tagName: "query-editor",
+
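+  // CodeMirror is used as a global here (it is not imported), so it is assumed to be loaded
+  // beforehand, e.g. from the application's vendor scripts.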
+  _initializeEditor: function() {
+
+    var editor,
+      updateSize,
+      self = this;
+
+    updateSize = function () {
+      editor.setSize(self.$(this).width(), self.$(this).height());
+      editor.refresh();
+    };
+
+    this.set('editor', CodeMirror.fromTextArea(document.getElementById('code-mirror'), {
+      mode: 'text/x-hive',
+      hint: CodeMirror.hint.sql,
+      indentWithTabs: true,
+      smartIndent: true,
+      lineNumbers: true,
+      matchBrackets : true,
+      autofocus: true,
+      extraKeys: {'Ctrl-Space': 'autocomplete'}
+    }));
+
+
+    CodeMirror.commands.autocomplete = function (cm) {
+      var lastWord = cm.getValue().split(' ').pop();
+
+      //if user wants to fill in a column
+      if (lastWord.indexOf('.') > -1) {
+        lastWord = lastWord.split('.')[0];
+
+        self.getColumnsHint(cm, lastWord);
+      } else {
+        CodeMirror.showHint(cm);
+      }
+    };
+
+    editor = this.get('editor');
+
+    editor.on('cursorActivity', function () {
+      self.set('highlightedText', editor.getSelections());
+    });
+
+    editor.setValue(this.get('query') || '');
+
+    editor.on('change', function (instance) {
+      Ember.run(function () {
+        self.set('query', instance.getValue());
+      });
+    });
+
+
+  }.on('didInsertElement'),
+
+
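+  // Keeps the editor in sync with the 'query' property; if the incoming query is an EXPLAIN
+  // statement the editor keeps its current text instead of being overwritten.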
+  updateValue: function () {
+    var query = this.get('query');
+    var editor = this.get('editor');
+
+    var isFinalExplainQuery = (query.toUpperCase().trim().indexOf('EXPLAIN') > -1);
+    var editorQuery = editor.getValue();
+
+    if (editor.getValue() !== query) {
+      if (isFinalExplainQuery) {
+        editor.setValue(editorQuery || '');
+      } else {
+        editor.setValue(query || '');
+      }
+    }
+
+  }.observes('query'),
+
+
+  actions:{
+  }
+
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/components/query-result-table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/query-result-table.js b/contrib/views/hive20/src/main/resources/ui/app/components/query-result-table.js
new file mode 100644
index 0000000..1cb1d3d
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/query-result-table.js
@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import Table from 'ember-light-table';
+import TableCommon from '../mixins/table-common';
+
+export default Ember.Component.extend({
+
+  classNames: ['query-result-table', 'clearfix'],
+
+  queryResult: {'schema' :[], 'rows' :[]},
+
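+  // Maps the query result schema to ember-light-table column definitions; a qualified label
+  // such as "tablename.col" is reduced to "col" for the row valuePath.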
+  columns: Ember.computed('queryResult', function() {
+    let queryResult = this.get('queryResult');
+    let columnArr = [];
+
+    queryResult.schema.forEach(function(column) {
+      let tempColumn = {};
+
+      tempColumn['label'] = column[0];
+
+      let localValuePath = column[0];
+      tempColumn['valuePath'] = localValuePath.substring(localValuePath.lastIndexOf('.') +1 , localValuePath.length);
+
+      columnArr.push(tempColumn);
+    });
+    return columnArr;
+  }),
+
+  rows: Ember.computed('queryResult','columns', function() {
+    let rowArr = [], self = this;
+
+    if(self.get('columns').length > 0) {
+      self.get('queryResult').rows.forEach(function(row, rowindex){
+        var mylocalObject = {};
+        self.get('columns').forEach(function(column, index){
+          mylocalObject[self.get('columns')[index].valuePath] = row[index];
+        });
+        rowArr.push(mylocalObject);
+      });
+      return rowArr;
+    }
+    return rowArr;
+  }),
+
+  table: Ember.computed('queryResult', 'rows', 'columns', function() {
+    return new Table(this.get('columns'), this.get('rows'));
+  }),
+
+  actions: {
+    onScrolledToBottom() {
+      //this.send('goNextPage');
+      console.log('hook for INFINITE scroll');
+    },
+
+    onColumnClick(column) {
+      console.log('I am in onColumnClick');
+    },
+    goNextPage(){
+      this.sendAction('goNextPage');
+    },
+    goPrevPage(){
+      this.sendAction('goPrevPage');
+    }
+
+  }
+
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/components/setting-item.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/setting-item.js b/contrib/views/hive20/src/main/resources/ui/app/components/setting-item.js
new file mode 100644
index 0000000..aa1fce0
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/setting-item.js
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+  tagName: 'tr',
+  actions: {
+    edit() {
+      this.sendAction('editAction', this.get('setting'));
+    },
+
+    delete() {
+      this.sendAction('deleteAction', this.get('setting'));
+    }
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/components/setting-list.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/setting-list.js b/contrib/views/hive20/src/main/resources/ui/app/components/setting-list.js
new file mode 100644
index 0000000..72a83a3
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/setting-list.js
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+  settings: [],
+  actions: {
+    addNewSettings() {
+      this.sendAction('newSettings');
+    }
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/components/table-advanced-settings.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/table-advanced-settings.js b/contrib/views/hive20/src/main/resources/ui/app/components/table-advanced-settings.js
new file mode 100644
index 0000000..181816a
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/table-advanced-settings.js
@@ -0,0 +1,168 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import fileFormats from '../configs/file-format';
+import Helpers from '../configs/helpers';
+
+
+export default Ember.Component.extend({
+
+  classNames: ['create-table-advanced-wrap'],
+  showLocationInput: false,
+  showFileFormatInput: false,
+  showRowFormatInput: false,
+  shouldAddBuckets: false,
+  errors: [],
+
+  settings: {},
+
+  errorsObserver: Ember.observer('errors.@each', function() {
+    let numBucketsError = this.get('errors').findBy('type', 'numBuckets');
+    if(!Ember.isEmpty(numBucketsError)) {
+      this.set('hasNumBucketError', true);
+      this.set('numBucketErrorText', numBucketsError.error);
+    }
+  }).on('init'),
+
+
+  fileFormats: Ember.copy(fileFormats),
+  terminationChars: Ember.computed(function () {
+    return Helpers.getAllTerminationCharacters();
+  }),
+
+  didReceiveAttrs() {
+    if (!Ember.isEmpty(this.get('settings.location'))) {
+      this.set('showLocationInput', true);
+    }
+    if (!Ember.isEmpty(this.get('settings.fileFormat'))) {
+      this.set('showFileFormatInput', true);
+      let currentFileFormat = this.get('fileFormats').findBy('name', this.get('settings.fileFormat.type'));
+      this.set('selectedFileFormat', currentFileFormat);
+      this.set('customFileFormat', currentFileFormat.custom);
+    } else {
+      let defaultFileFormat = this.get('fileFormats').findBy('default', true);
+      this.set('settings.fileFormat', {});
+      this.set('settings.fileFormat.type', defaultFileFormat.name);
+    }
+    if (!Ember.isEmpty(this.get('settings.rowFormat'))) {
+      this.set('showRowFormatInput', true);
+      this.set('selectedFieldTerminator', this.get('settings.rowFormat.fieldTerminatedBy'));
+      this.set('selectedLinesTerminator', this.get('settings.rowFormat.linesTerminatedBy'));
+      this.set('selectedNullDefinition', this.get('settings.rowFormat.nullDefinedAs'));
+      this.set('selectedEscapeDefinition', this.get('settings.rowFormat.escapeDefinedAs'));
+    }
+  },
+
+  locationInputObserver: Ember.observer('showLocationInput', function () {
+    if (!this.get('showLocationInput')) {
+      this.set('settings.location');
+    }
+  }),
+
+  fileFormatInputObserver: Ember.observer('showFileFormatInput', function () {
+    if (!this.get('showFileFormatInput')) {
+      this.set('settings.fileFormat');
+    } else {
+      this.set('selectedFileFormat', this.get('fileFormats').findBy('default', true));
+    }
+  }),
+
+  rowFormatInputObserver: Ember.observer('showRowFormatInput', function () {
+    if (!this.get('showRowFormatInput')) {
+      this.send('clearFieldTerminator');
+      this.send('clearLinesTerminator');
+      this.send('clearNullDefinition');
+      this.send('clearEscapeDefinition');
+      this.set('settings.rowFormat');
+    } else {
+      this.set('settings.rowFormat', {});
+    }
+  }),
+
+  actions: {
+
+    closeHdfsModal() {
+      this.set('showDirectoryViewer', false);
+    },
+
+    hdfsPathSelected(path) {
+      this.set('settings.location', path);
+      this.set('showDirectoryViewer', false);
+    },
+
+    toggleDirectoryViewer() {
+      this.set('showDirectoryViewer', true);
+    },
+
+    toggleLocation() {
+      this.toggleProperty('showLocationInput');
+    },
+
+    toggleFileFormat() {
+      this.toggleProperty('showFileFormatInput');
+    },
+
+    toggleRowFormat() {
+      this.toggleProperty('showRowFormatInput');
+    },
+
+    fileFormatSelected(format) {
+      this.set('settings.fileFormat.type', format.name);
+      this.set('selectedFileFormat', format);
+      this.set('customFileFormat', format.custom);
+    },
+
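+  // When the read-only display field gains focus, extend the drawer and move focus to the
+  // actual search input once it is rendered.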
+    fieldTerminatorSelected(terminator) {
+      this.set('settings.rowFormat.fieldTerminatedBy', terminator);
+      this.set('selectedFieldTerminator', terminator);
+    },
+    clearFieldTerminator() {
+      this.set('settings.rowFormat.fieldTerminatedBy');
+      this.set('selectedFieldTerminator');
+    },
+
+    linesTerminatorSelected(terminator) {
+      this.set('settings.rowFormat.linesTerminatedBy', terminator);
+      this.set('selectedLinesTerminator', terminator);
+    },
+    clearLinesTerminator() {
+      this.set('settings.rowFormat.linesTerminatedBy');
+      this.set('selectedLinesTerminator');
+    },
+
+    nullDefinedAsSelected(terminator) {
+      this.set('settings.rowFormat.nullDefinedAs', terminator);
+      this.set('selectedNullDefinition', terminator);
+    },
+    clearNullDefinition() {
+      this.set('settings.rowFormat.nullDefinedAs');
+      this.set('selectedNullDefinition');
+    },
+
+    escapeDefinedAsSelected(terminator) {
+      this.set('settings.rowFormat.escapeDefinedAs', terminator);
+      this.set('selectedEscapeDefinition', terminator);
+    },
+    clearEscapeDefinition() {
+      this.set('settings.rowFormat.escapeDefinedAs');
+      this.set('selectedEscapeDefinition');
+    },
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/components/table-columns.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/table-columns.js b/contrib/views/hive20/src/main/resources/ui/app/components/table-columns.js
new file mode 100644
index 0000000..5479496
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/table-columns.js
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import Column from '../models/column';
+
+export default Ember.Component.extend({
+  columns: [],
+  shouldAddBuckets: null,
+
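+  // Tracks whether any column is marked as clustered; the number-of-buckets input is only
+  // relevant in that case.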
+  clusteredColumnObserver: Ember.observer('columns.@each.isClustered', function(sender, key, value, rev) {
+    let clusteredColumns = this.get('columns').filterBy('isClustered');
+    if (clusteredColumns.length > 0) {
+      this.set('shouldAddBuckets', true);
+    } else {
+      if(!Ember.isEmpty(this.get('shouldAddBuckets'))) {
+        this.set('shouldAddBuckets', false);
+      }
+
+    }
+  }),
+
+  actions: {
+    addNewColumn() {
+      let newEmptyColumn = Column.create({editing: true});
+      this.get('columns').pushObject(newEmptyColumn);
+    },
+
+    columnDeleted(column) {
+      this.get('columns').removeObject(column);
+    },
+
+    columnUpdated() {
+
+    }
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/components/table-properties.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/table-properties.js b/contrib/views/hive20/src/main/resources/ui/app/components/table-properties.js
new file mode 100644
index 0000000..1ba15cc
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/table-properties.js
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import TableProperty from '../models/table-property';
+
+export default Ember.Component.extend({
+  properties: [],
+
+  actions: {
+    addNewRow() {
+      let emptyProperty = TableProperty.create({editing: true});
+      this.get('properties').pushObject(emptyProperty);
+    },
+
+    itemDeleted(property) {
+      this.get('properties').removeObject(property);
+    },
+
+    itemUpdated(property) {
+
+    }
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/components/tabs-item.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/tabs-item.js b/contrib/views/hive20/src/main/resources/ui/app/components/tabs-item.js
new file mode 100644
index 0000000..e790c55
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/tabs-item.js
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+  tagName: 'li',
+  classNameBindings: ['pullRight:pull-right', 'active'],
+  pullRight: Ember.computed.readOnly('tab.pullRight'),
+  active: Ember.computed.alias('tab.active'),
+
+  shouldTransition: Ember.computed('tab.transition', function() {
+    if(!Ember.isEmpty(this.get('tab.transition'))) {
+      return this.get('tab.transition');
+    } else {
+      return true;
+    }
+  }),
+
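+  // After render (and again whenever the tab link is clicked), re-check the anchor's CSS classes
+  // on the next run loop tick so the component's 'active' flag stays in sync with the rendered state.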
+  didInsertElement: function() {
+    Ember.run.later(() => this.send('changeActiveState'));
+    this.$('a').click(() => {
+      Ember.run.later(() => {
+        this.send('changeActiveState');
+      });
+    });
+  },
+
+  actions : {
+    selected() {
+      this.get('tabs').forEach((x) => x.set('active', false));
+      this.set('active', true);
+      this.sendAction('activate', this.get('tab.link'));
+    },
+
+    changeActiveState: function() {
+      if(this.get('shouldTransition')) {
+        let classes = this.$('a').attr('class').split(' ');
+        if(classes.indexOf('active') !== -1) {
+          this.get('tabs').forEach((x) => x.set('active', false));
+          this.set('active', true);
+        }
+      }
+    }
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/components/tabs-pane.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/tabs-pane.js b/contrib/views/hive20/src/main/resources/ui/app/components/tabs-pane.js
new file mode 100644
index 0000000..060a062
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/tabs-pane.js
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+  tagName: 'ul',
+  classNames: ['row', 'nav', 'nav-tabs'],
+  classNameBindings : ['inverse'],
+  inverse: false,
+  tabs: Ember.A()
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/components/top-application-bar.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/top-application-bar.js b/contrib/views/hive20/src/main/resources/ui/app/components/top-application-bar.js
new file mode 100644
index 0000000..8828275
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/top-application-bar.js
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/configs/create-table-tabs.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/configs/create-table-tabs.js b/contrib/views/hive20/src/main/resources/ui/app/configs/create-table-tabs.js
new file mode 100644
index 0000000..d1da231
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/configs/create-table-tabs.js
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+let createTableTabs = [
+  Ember.Object.create({
+    name: 'columns',
+    label: 'COLUMNS',
+    transition: false,
+    link: 'create.table.columns',
+    faIcon: 'list'
+  }),
+
+  Ember.Object.create({
+    name: 'advanced',
+    label: 'ADVANCED',
+    transition: false,
+    link: 'create.table.advanced',
+    faIcon: 'file-text-o'
+  }),
+
+  Ember.Object.create({
+    name: 'properties',
+    label: 'TABLE PROPERTIES',
+    transition: false,
+    link: 'create.table.properties',
+    faIcon: 'file-text-o'
+  })
+
+];
+
+export default createTableTabs;

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/configs/datatypes.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/configs/datatypes.js b/contrib/views/hive20/src/main/resources/ui/app/configs/datatypes.js
new file mode 100644
index 0000000..0d07ce2
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/configs/datatypes.js
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+export default [
+  {label: "TINYINT", hasPrecision: false, hasScale: false},
+  {label: "SMALLINT", hasPrecision: false, hasScale: false},
+  {label: "INT", hasPrecision: false, hasScale: false},
+  {label: "BIGINT", hasPrecision: false, hasScale: false},
+  {label: "BOOLEAN", hasPrecision: false, hasScale: false},
+  {label: "FLOAT", hasPrecision: false, hasScale: false},
+  {label: "DOUBLE", hasPrecision: false, hasScale: false},
+  {label: "STRING", hasPrecision: false, hasScale: false},
+  {label: "BINARY", hasPrecision: false, hasScale: false},
+  {label: "DATE", hasPrecision: false, hasScale: false},
+  {label: "TIMESTAMP", hasPrecision: false, hasScale: false},
+  {label: "DECIMAL", hasPrecision: true, hasScale: true},
+  {label: "VARCHAR", hasPrecision: true, hasScale: false},
+  {label: "CHAR", hasPrecision: true, hasScale: false}
+];

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/configs/file-format.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/configs/file-format.js b/contrib/views/hive20/src/main/resources/ui/app/configs/file-format.js
new file mode 100644
index 0000000..4042b63
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/configs/file-format.js
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+export default [
+  {name: "SEQUENCEFILE", default: false, custom: false},
+  {name: "TEXTFILE", default: false, custom: false},
+  {name: "RCFILE", default: false, custom: false},
+  {name: "ORC", default: true, custom: false},
+  {name: "AVRO", default: false, custom: false},
+  {name: "CUSTOM SerDe", default: false, custom: true},
+];

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/configs/helpers.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/configs/helpers.js b/contrib/views/hive20/src/main/resources/ui/app/configs/helpers.js
new file mode 100644
index 0000000..025caa6
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/configs/helpers.js
@@ -0,0 +1,151 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import nonPrintableChars from './non-printable-escape-chars';
+
+/* globals moment */
+
+export default Ember.Object.create({
+  isInteger: function (x) {
+    return !isNaN(x) && Number(x) % 1 === 0;
+  },
+
+  isDate: function (date) {
+    return moment(date).isValid();
+  },
+
+  regexes: {
+    allUppercase: /^[^a-z]*$/,
+    whitespaces: /^(\s*).*$/,
+    digits: /^\d+$/,
+    name: /\w+/ig,
+    dotPath: /[a-z.]+/i,
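+    // matches statements like "set hive.execution.engine = tez;"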
+    setSetting: /^set\s+[\w-.]+(\s+|\s?)=(\s+|\s?)[\w-.]+(\s+|\s?);/gim
+  },
+
+  validationValues: {
+    bool: [
+      Ember.Object.create({
+        value: 'true'
+      }),
+      Ember.Object.create({
+        value: 'false'
+      })
+    ],
+
+    execEngine: [
+      Ember.Object.create({
+        value: 'tez'
+      }),
+      Ember.Object.create({
+        value: 'mr'
+      })
+    ]
+  },
+
+  insensitiveCompare: function (sourceString) {
+    var args = Array.prototype.slice.call(arguments, 1);
+
+    if (!sourceString) {
+      return false;
+    }
+
+    return !!args.find(function (arg) {
+      return sourceString.match(new RegExp('^' + arg + '$', 'i'));
+    });
+  },
+
+  insensitiveContains: function (sourceString, destString) {
+    return sourceString.toLowerCase().indexOf(destString.toLowerCase()) > -1;
+  },
+
+  convertToArray: function (inputObj) {
+    var array = [];
+
+    for (var key in inputObj) {
+      if (inputObj.hasOwnProperty(key)) {
+        array.pushObject({
+          name: key,
+          value: inputObj[key]
+        });
+      }
+    }
+    return array;
+  },
+
+  /**
+   * Converts a number of seconds into a human readable duration string (hours, minutes, seconds).
+   *
+   * @param {Number} secs number of seconds to convert
+   * @return {String} formatted duration, e.g. 3725 -> "1 hrs 2 mins 5 secs" (label text comes from Ember.I18n)
+   */
+  secondsToHHMMSS: function (secs) {
+    var hours = 0,
+      minutes = 0,
+      seconds = secs,
+      divisor_for_minutes,
+      divisor_for_seconds,
+      formattedVal = [];
+
+    if (seconds < 60) {
+      formattedVal.push(Ember.I18n.t('labels.secsShort', {
+        seconds: seconds
+      }));
+    } else {
+      hours = Math.floor(seconds / (60 * 60));
+
+      divisor_for_minutes = seconds % (60 * 60);
+      minutes = Math.floor(divisor_for_minutes / 60);
+
+      divisor_for_seconds = divisor_for_minutes % 60;
+      seconds = Math.ceil(divisor_for_seconds);
+
+      if (hours > 0) {
+        formattedVal.push(Ember.I18n.t('labels.hrsShort', {
+          hours: hours
+        }));
+      }
+      if (minutes > 0) {
+        formattedVal.push(Ember.I18n.t('labels.minsShort', {
+          minutes: minutes
+        }));
+      }
+      if (seconds > 0) {
+        formattedVal.push(Ember.I18n.t('labels.secsShort', {
+          seconds: seconds
+        }));
+      }
+
+    }
+
+    return formattedVal.join(' ');
+  },
+
+  /**
+   * Returns the non-printable escape characters followed by all printable ASCII characters
+   * (codes 33-126), used as the available termination/escape character choices.
+   */
+  getAllTerminationCharacters() {
+    let arr = Ember.copy(nonPrintableChars);
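+    // append the printable ASCII characters ('!' through '~', i.e. codes 33-126) after the non-printable ones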
+    for(let i=33; i < 127; i++) {
+      arr.pushObject({id: i.toString(), name: String.fromCodePoint(i)});
+    }
+    return arr;
+  }
+
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/configs/hive-parameters.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/configs/hive-parameters.js b/contrib/views/hive20/src/main/resources/ui/app/configs/hive-parameters.js
new file mode 100644
index 0000000..df16c39
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/configs/hive-parameters.js
@@ -0,0 +1,93 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import helpers from './helpers';
+
+let hiveParameters = [
+  Ember.Object.create({
+    name: 'hive.tez.container.size',
+    validate: helpers.regexes.digits
+  }),
+  Ember.Object.create({
+    name: 'hive.prewarm.enabled',
+    values: helpers.validationValues.bool
+  }),
+  Ember.Object.create({
+    name: 'hive.prewarm.numcontainers',
+    validate: helpers.regexes.digits
+  }),
+  Ember.Object.create({
+    name: 'hive.tez.auto.reducer.parallelism',
+    values: helpers.validationValues.bool
+  }),
+  Ember.Object.create({
+    name: 'hive.execution.engine',
+    values: helpers.validationValues.execEngine
+  }),
+  Ember.Object.create({
+    name: 'hive.vectorized.execution.enabled',
+    values: helpers.validationValues.bool
+  }),
+  Ember.Object.create({
+    name: 'hive.auto.convert.join',
+    values: helpers.validationValues.bool
+  }),
+  Ember.Object.create({
+    name: 'tez.am.resource.memory.mb',
+    validate: helpers.regexes.digits
+  }),
+  Ember.Object.create({
+    name: 'tez.am.container.idle.release-timeout-min.millis',
+    validate: helpers.regexes.digits
+  }),
+  Ember.Object.create({
+    name: 'tez.am.container.idle.release-timeout-max.millis',
+    validate: helpers.regexes.digits
+  }),
+  Ember.Object.create({
+    name: 'tez.queue.name',
+    validate: helpers.regexes.name
+  }),
+  Ember.Object.create({
+    name: 'tez.runtime.io.sort.mb',
+    validate: helpers.regexes.digits
+  }),
+  Ember.Object.create({
+    name: 'tez.runtime.sort.threads',
+    validate: helpers.regexes.digits
+  }),
+  Ember.Object.create({
+    name: 'tez.runtime.compress.codec',
+    validate: helpers.regexes.dotPath
+  }),
+  Ember.Object.create({
+    name: 'tez.grouping.min-size',
+    validate: helpers.regexes.digits
+  }),
+  Ember.Object.create({
+    name: 'tez.grouping.max-size',
+    validate: helpers.regexes.digits
+  }),
+  Ember.Object.create({
+    name: 'tez.generate.debug.artifacts',
+    values: helpers.validationValues.bool
+  })
+];
+
+export default hiveParameters;

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/configs/non-printable-escape-chars.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/configs/non-printable-escape-chars.js b/contrib/views/hive20/src/main/resources/ui/app/configs/non-printable-escape-chars.js
new file mode 100644
index 0000000..6c41a11
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/configs/non-printable-escape-chars.js
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+export default [{"id": "0", "name": "NUL", "description": "(null)"},
+  {"id": "1", "name": "SOH", "description": "(start of heading)"},
+  {"id": "2", "name": "STX", "description": "(start of text)"},
+  {"id": "3", "name": "ETX", "description": "(end of text)"},
+  {"id": "4", "name": "EOT", "description": "(end of transmission)"},
+  {"id": "5", "name": "ENQ", "description": "(enquiry)"},
+  {"id": "6", "name": "ACK", "description": "(acknowledge)"},
+  {"id": "7", "name": "BEL", "description": "(bell)"},
+  {"id": "8", "name": "BS", "description": "(backspace)"},
+  {"id": "9", "name": "TAB", "description": "(horizontal tab)"},
+  {"id": "10", "name": "LF", "description": "(NL line feed - new line)"},
+  {"id": "11", "name": "VT", "description": "(vertical tab)"},
+  {"id": "12", "name": "FF", "description": "(NP form feed - new page)"},
+  {"id": "13", "name": "CR", "description": "(carriage return)"},
+  {"id": "14", "name": "SO", "description": "(shift out)"},
+  {"id": "15", "name": "SI", "description": "(shift in)"},
+  {"id": "16", "name": "DLE", "description": "(data link escape)"},
+  {"id": "17", "name": "DC1", "description": "(device control 1)"},
+  {"id": "18", "name": "DC2", "description": "(device control 2)"},
+  {"id": "19", "name": "DC3", "description": "(device control 3)"},
+  {"id": "20", "name": "DC4", "description": "(device control 4)"},
+  {"id": "21", "name": "NAK", "description": "(negative ackowledge)"},
+  {"id": "22", "name": "SYN", "description": "(synchronous idle)"},
+  {"id": "23", "name": "ETB", "description": "(end of trans. block)"},
+  {"id": "24", "name": "CAN", "description": "(cancel)"},
+  {"id": "25", "name": "EM", "description": "(end of medium)"},
+  {"id": "26", "name": "SUB", "description": "(substitute)"},
+  {"id": "27", "name": "ESC", "description": "(escape)"},
+  {"id": "28", "name": "FS", "description": "(file separator)"},
+  {"id": "29", "name": "GS", "description": "(group separator)"},
+  {"id": "30", "name": "RS", "description": "(record separator)"},
+  {"id": "31", "name": "US", "description": "(unit separator)"},
+  {"id": "32", "name": "Space", "description": ""},
+  {"id": "127", "name": "DEL", "description": ""}
+];

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/configs/table-level-tabs.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/configs/table-level-tabs.js b/contrib/views/hive20/src/main/resources/ui/app/configs/table-level-tabs.js
new file mode 100644
index 0000000..7a0cec1
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/configs/table-level-tabs.js
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+let tableLevelTabs = [
+  Ember.Object.create({
+    name: 'columns',
+    label: 'COLUMNS',
+    link: 'databases.database.tables.table.columns',
+    faIcon: 'list'
+  }),
+  Ember.Object.create({
+    name: 'partitions',
+    label: 'PARTITIONS',
+    link: 'databases.database.tables.table.partitions',
+    faIcon: 'file-text-o'
+  }),
+  Ember.Object.create({
+    name: 'ddl',
+    label: 'DDL',
+    link: 'databases.database.tables.table.ddl',
+    faIcon: 'file-text-o'
+  }),
+  Ember.Object.create({
+    name: 'storage',
+    label: 'STORAGE INFORMATION',
+    link: 'databases.database.tables.table.storage',
+    faIcon: 'file-text-o'
+  }),
+  Ember.Object.create({
+    name: 'detailedInfo',
+    label: 'DETAILED INFORMATION',
+    link: 'databases.database.tables.table.details',
+    faIcon: 'file-text-o'
+  }),
+  Ember.Object.create({
+    name: 'viewInfo',
+    label: 'VIEW INFORMATION',
+    link: 'databases.database.tables.table.view',
+    faIcon: 'file-text-o'
+  }),
+  Ember.Object.create({
+    name: 'statistics',
+    label: 'STATISTICS',
+    link: 'databases.database.tables.table.stats',
+    faIcon: 'line-chart'
+  })
+];
+
+export default tableLevelTabs;

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/configs/top-level-tabs.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/configs/top-level-tabs.js b/contrib/views/hive20/src/main/resources/ui/app/configs/top-level-tabs.js
new file mode 100644
index 0000000..6f04f0a
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/configs/top-level-tabs.js
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+let topLevelTabs = [
+  Ember.Object.create({
+    name: 'query',
+    label: 'QUERY',
+    link: 'query',
+    faIcon: 'paper-plane'
+  }),
+  Ember.Object.create({
+    name: 'jobs',
+    label: 'JOBS',
+    link: 'jobs',
+    faIcon: 'paper-plane'
+  }),
+  Ember.Object.create({
+    name: 'tables',
+    label: 'TABLES',
+    link: 'databases',
+    faIcon: 'table'
+  }),
+  Ember.Object.create({
+    name: 'saves-queries',
+    label: 'SAVED QUERIES',
+    link: 'savedqueries',
+    faIcon: 'paperclip'
+  }),
+  Ember.Object.create({
+    name: 'udfs',
+    label: 'UDFs',
+    link: 'udfs',
+    faIcon: 'puzzle-piece'
+  }),
+  Ember.Object.create({
+    name: 'settings',
+    label: 'SETTINGS',
+    link: 'settings',
+    faIcon: 'cog'
+  }),
+  Ember.Object.create({
+    name: 'notifications',
+    label: 'NOTIFICATIONS',
+    link: 'messages',
+    faIcon: 'bell',
+    pullRight: true
+  })
+];
+
+export default topLevelTabs;

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/controllers/.gitkeep
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/controllers/.gitkeep b/contrib/views/hive20/src/main/resources/ui/app/controllers/.gitkeep
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/controllers/jobs.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/controllers/jobs.js b/contrib/views/hive20/src/main/resources/ui/app/controllers/jobs.js
new file mode 100644
index 0000000..9ab46f3
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/controllers/jobs.js
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Controller.extend({
+  moment: Ember.inject.service(),
+  queryParams: ['startTime', 'endTime'],
+  startTime: null,
+  endTime: null,
+
+
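+  // Display strings for the startTime/endTime query params; the values are assumed to be
+  // timestamps that moment can parse (e.g. epoch milliseconds).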
+  startTimeText: Ember.computed('startTime', function() {
+    return this.get('moment').moment(this.get('startTime')).format('YYYY-MM-DD');
+  }),
+
+  endTimeText: Ember.computed('endTime', function() {
+    return this.get('moment').moment(this.get('endTime')).format('YYYY-MM-DD');
+  })
+
+
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/helpers/.gitkeep
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/helpers/.gitkeep b/contrib/views/hive20/src/main/resources/ui/app/helpers/.gitkeep
new file mode 100644
index 0000000..e69de29


[15/20] ambari git commit: AMBARI-19321 : Hive View 2.0 - Minimal view for Hive which includes new UI changes. Also made changes in poms as required (nitirajrathore)

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/DetailedTableInfoParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/DetailedTableInfoParser.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/DetailedTableInfoParser.java
new file mode 100644
index 0000000..b526789
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/DetailedTableInfoParser.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal.parsers;
+
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.internal.dto.DetailedTableInfo;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Parses the "# Detailed Table Information" section of the 'describe formatted' output.
+ */
+public class DetailedTableInfoParser extends AbstractTableMetaParser<DetailedTableInfo> {
+  /*
+    | # Detailed Table Information  | NULL                                                                 | NULL                                                                                                                                                                                                                                                              |
+    | Database:                     | default                                                              | NULL                                                                                                                                                                                                                                                              |
+    | Owner:                        | admin                                                                | NULL                                                                                                                                                                                                                                                              |
+    | CreateTime:                   | Mon Aug 01 13:28:42 UTC 2016                                         | NULL                                                                                                                                                                                                                                                              |
+    | LastAccessTime:               | UNKNOWN                                                              | NULL                                                                                                                                                                                                                                                              |
+    | Protect Mode:                 | None                                                                 | NULL                                                                                                                                                                                                                                                              |
+    | Retention:                    | 0                                                                    | NULL                                                                                                                                                                                                                                                              |
+    | Location:                     | hdfs://c6401.ambari.apache.org:8020/apps/hive/warehouse/geolocation  | NULL                                                                                                                                                                                                                                                              |
+    | Table Type:                   | MANAGED_TABLE                                                        | NULL                                                                                                                                                                                                                                                              |
+    | Table Parameters:             | NULL                                                                 | NULL                                                                                                                                                                                                                                                              |
+    |                               | COLUMN_STATS_ACCURATE                                                | {\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"column1\":\"true\",\"column2\":\"true\",\"column3\":\"true\",\"column4\":\"true\",\"column5\":\"true\",\"column6\":\"true\",\"column7\":\"true\",\"column8\":\"true\",\"column9\":\"true\",\"column10\":\"true\"}}  |
+    |                               | numFiles                                                             | 1                                                                                                                                                                                                                                                                 |
+    |                               | numRows                                                              | 8001                                                                                                                                                                                                                                                              |
+    |                               | rawDataSize                                                          | 7104888                                                                                                                                                                                                                                                           |
+    |                               | totalSize                                                            | 43236                                                                                                                                                                                                                                                             |
+    |                               | transient_lastDdlTime                                                | 1479819460                                                                                                                                                                                                                                                        |
+    |                               | NULL                                                                 | NULL                                                                                                                                                                                                                                                              |
+   */
+  public DetailedTableInfoParser() {
+    super("# Detailed Table Information", null, "");
+  }
+
+  @Override
+  public DetailedTableInfo parse(List<Row> rows) {
+    DetailedTableInfo info = new DetailedTableInfo();
+    Map<String, Object> parsedSection = parseSection(rows);
+    info.setDbName(getString(parsedSection, "Database:"));
+    info.setOwner(getString(parsedSection, "Owner:"));
+    info.setCreateTime(getString(parsedSection, "CreateTime:"));
+    info.setLastAccessTime(getString(parsedSection, "LastAccessTime:"));
+    info.setRetention(getString(parsedSection, "Retention:"));
+    info.setLocation(getString(parsedSection, "Location:"));
+    info.setTableType(getString(parsedSection, "Table Type:"));
+
+    info.setParameters(getMap(parsedSection, "Table Parameters:"));
+
+    return info;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/ParserUtils.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/ParserUtils.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/ParserUtils.java
new file mode 100644
index 0000000..b4adf5c
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/ParserUtils.java
@@ -0,0 +1,52 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.ambari.view.hive20.internal.parsers;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class ParserUtils {
+
+  public static final String DATA_TYPE_REGEX = "\\s*([^() ]+)\\s*(\\s*\\(\\s*([0-9]+)\\s*(\\s*,\\s*([0-9]+))?\\s*\\)\\s*)?\\s*";
+
+  /**
+   * @param columnDataTypeString : the string that needs to be parsed as a datatype example : decimal(10,3)
+   * @return a list of string containing type, precision and scale in that order if present, null otherwise
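+   * For example, "decimal(10,3)" yields ["decimal", "10", "3"] and "string" yields ["string", null, null].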
+   */
+  public static List<String> parseColumnDataType(String columnDataTypeString) {
+    List<String> typePrecisionScale = new ArrayList<>(3);
+
+    Pattern pattern = Pattern.compile(DATA_TYPE_REGEX);
+    Matcher matcher = pattern.matcher(columnDataTypeString);
+
+    if (matcher.find()) {
+      typePrecisionScale.add(matcher.group(1));
+      typePrecisionScale.add(matcher.group(3));
+      typePrecisionScale.add(matcher.group(5));
+    } else {
+      typePrecisionScale.add(null);
+      typePrecisionScale.add(null);
+      typePrecisionScale.add(null);
+    }
+
+    return typePrecisionScale;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/PartitionInfoParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/PartitionInfoParser.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/PartitionInfoParser.java
new file mode 100644
index 0000000..fbcb6e8
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/PartitionInfoParser.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal.parsers;
+
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;
+import org.apache.ambari.view.hive20.internal.dto.PartitionInfo;
+import org.apache.parquet.Strings;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Parses the "# Partition Information" section of the 'describe formatted' output into the partition columns.
+ */
+public class PartitionInfoParser extends AbstractTableMetaParser<PartitionInfo> {
+  private static final Logger LOG = LoggerFactory.getLogger(PartitionInfoParser.class);
+
+  /*
+      General format
+      | # Partition Information       | NULL                                                                          | NULL                         |
+      | # col_name                    | data_type                                                                     | comment                      |
+      |                               | NULL                                                                          | NULL                         |
+      | dt                            | string                                                                        |                              |
+      | country                       | string                                                                        |                              |
+      |                               | NULL                                                                          | NULL                         |
+     */
+
+  public PartitionInfoParser() {
+    super("# Partition Information", "# col_name", "", "");
+  }
+
+  @Override
+  public PartitionInfo parse(List<Row> rows) {
+    List<ColumnInfo> columns = new ArrayList<>();
+
+
+    Map<String, Object> parsedSection = parseSection(rows);
+    for(Object obj: parsedSection.values()) {
+      if(obj instanceof Entry) {
+        Entry entry = (Entry)obj;
+        String typeInfo = entry.getValue();
+        // parse precision and scale
+        List<String> typePrecisionScale = ParserUtils.parseColumnDataType(typeInfo);
+        String datatype = typePrecisionScale.get(0);
+        String precisionString = typePrecisionScale.get(1);
+        String scaleString = typePrecisionScale.get(2);
+        Integer precision = !Strings.isNullOrEmpty(precisionString) ? Integer.valueOf(precisionString.trim()): null;
+        Integer scale = !Strings.isNullOrEmpty(scaleString) ? Integer.valueOf(scaleString.trim()): null;
+        ColumnInfo columnInfo = new ColumnInfo(entry.getName(), datatype, precision, scale, entry.getComment());
+        columns.add(columnInfo);
+        LOG.debug("found partition column definition : {}", columnInfo);
+      }
+    }
+    return columns.size() > 0 ? new PartitionInfo(columns) : null;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/StorageInfoParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/StorageInfoParser.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/StorageInfoParser.java
new file mode 100644
index 0000000..09fcfd0
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/StorageInfoParser.java
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal.parsers;
+
+import com.google.common.base.Function;
+import com.google.common.base.Predicate;
+import com.google.common.collect.FluentIterable;
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.internal.dto.ColumnOrder;
+import org.apache.ambari.view.hive20.internal.dto.Order;
+import org.apache.ambari.view.hive20.internal.dto.StorageInfo;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.annotation.Nullable;
+import java.util.Arrays;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Parses the Storage Information from the describe formatted output.
+ */
+public class StorageInfoParser extends AbstractTableMetaParser<StorageInfo> {
+  private static final Logger LOG = LoggerFactory.getLogger(StorageInfoParser.class);
+
+
+  public StorageInfoParser() {
+    super("# Storage Information", null, "");
+  }
+
+  @Override
+  public StorageInfo parse(List<Row> rows) {
+    StorageInfo info = new StorageInfo();
+    Map<String, Object> parsedSection = parseSection(rows);
+
+    info.setSerdeLibrary(getString(parsedSection, "SerDe Library:"));
+    info.setInputFormat(getString(parsedSection, "InputFormat:"));
+    info.setOutputFormat(getString(parsedSection, "OutputFormat:"));
+    info.setCompressed(getString(parsedSection, "Compressed:"));
+    info.setNumBuckets(getString(parsedSection, "Num Buckets:"));
+    info.setBucketCols(parseBucketColumns(getString(parsedSection, "Bucket Columns:")));
+    info.setSortCols(parseSortCols(getString(parsedSection, "Sort Columns:")));
+    info.setParameters(getMap(parsedSection, "Storage Desc Params:"));
+
+    return info;
+  }
+
+  private List<String> parseBucketColumns(String string) {
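+    // the "Bucket Columns:" value is typically rendered like "[col1, col2]"; strip the brackets and commas and trim each name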
+    String[] strings = string.split("[\\[\\],]");
+    return FluentIterable.from(Arrays.asList(strings)).filter(new Predicate<String>() {
+      @Override
+      public boolean apply(@Nullable String input) {
+        return !(null == input || input.trim().length() == 0) ;
+      }
+    }).transform(new Function<String, String>() {
+      @Override
+      public String apply(String input) {
+        return input.trim();
+      }
+    }).toList();
+  }
+
+  private List<ColumnOrder> parseSortCols(String str) {
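+    // extracts entries of the form "Order(col:<name>, order:<digit>)" from the "Sort Columns:" value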
+    String patternStr = "Order\\s*\\(\\s*col\\s*:\\s*([^,]+)\\s*,\\s*order\\s*:\\s*(\\d)\\s*\\)";
+    Pattern pattern = Pattern.compile(patternStr);
+
+    Matcher matcher = pattern.matcher(str);
+
+    LinkedList<ColumnOrder> list = new LinkedList<>();
+    while(matcher.find()){
+      String colName = matcher.group(1);
+      String orderString = matcher.group(2);
+      Order order = Order.fromOrdinal(Integer.valueOf(orderString));
+      ColumnOrder co = new ColumnOrder(colName, order);
+      list.add(co);
+      LOG.debug("columnOrder : {}", co);
+    }
+
+    return list;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/TableMetaParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/TableMetaParser.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/TableMetaParser.java
new file mode 100644
index 0000000..aae23c8
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/TableMetaParser.java
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal.parsers;
+
+import org.apache.ambari.view.hive20.client.Row;
+
+import java.util.List;
+
+/**
+ * Parses the 'show create table' and 'describe formatted' result rows of a table into a metadata object.
+ */
+public interface TableMetaParser<T> {
+  T parse(String database, String table, List<Row> createTableStatementRows, List<Row> describeFormattedRows);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/TableMetaParserImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/TableMetaParserImpl.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/TableMetaParserImpl.java
new file mode 100644
index 0000000..5cae34a
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/TableMetaParserImpl.java
@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal.parsers;
+
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;
+import org.apache.ambari.view.hive20.internal.dto.DetailedTableInfo;
+import org.apache.ambari.view.hive20.internal.dto.PartitionInfo;
+import org.apache.ambari.view.hive20.internal.dto.StorageInfo;
+import org.apache.ambari.view.hive20.internal.dto.TableMeta;
+import org.apache.ambari.view.hive20.internal.dto.ViewInfo;
+
+import javax.inject.Inject;
+import java.util.List;
+
+/**
+ * Default {@link TableMetaParser} implementation that delegates to the individual section parsers
+ * and assembles the complete {@link TableMeta}.
+ */
+public class TableMetaParserImpl implements TableMetaParser<TableMeta> {
+
+  @Inject
+  private CreateTableStatementParser createTableStatementParser;
+
+  @Inject
+  private ColumnInfoParser columnInfoParser;
+
+  @Inject
+  private PartitionInfoParser partitionInfoParser;
+
+  @Inject
+  private DetailedTableInfoParser detailedTableInfoParser;
+
+  @Inject
+  private StorageInfoParser storageInfoParser;
+
+  @Inject
+  private ViewInfoParser viewInfoParser;
+
+
+
+  @Override
+  public TableMeta parse(String database, String table, List<Row> createTableStatementRows, List<Row> describeFormattedRows) {
+    String createTableStatement = createTableStatementParser.parse(createTableStatementRows);
+    DetailedTableInfo tableInfo = detailedTableInfoParser.parse(describeFormattedRows);
+    StorageInfo storageInfo = storageInfoParser.parse(describeFormattedRows);
+    List<ColumnInfo> columns = columnInfoParser.parse(describeFormattedRows);
+    PartitionInfo partitionInfo = partitionInfoParser.parse(describeFormattedRows);
+    ViewInfo viewInfo = viewInfoParser.parse(describeFormattedRows);
+
+
+    TableMeta meta = new TableMeta();
+    meta.setId(database + "/" + table);
+    meta.setDatabase(database);
+    meta.setTable(table);
+    meta.setColumns(columns);
+    meta.setDdl(createTableStatement);
+    meta.setPartitionInfo(partitionInfo);
+    meta.setDetailedInfo(tableInfo);
+    meta.setStorageInfo(storageInfo);
+    meta.setViewInfo(viewInfo);
+    return meta;
+  }
+}
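
For context, a minimal usage sketch (illustrative only, not part of this patch). It assumes Guice is available as the javax.inject provider and that the injected section parsers can be created through just-in-time bindings; the rows are expected to come from running SHOW CREATE TABLE and DESCRIBE FORMATTED against the table.

    import com.google.inject.Guice;
    import org.apache.ambari.view.hive20.client.Row;
    import org.apache.ambari.view.hive20.internal.dto.TableMeta;
    import org.apache.ambari.view.hive20.internal.parsers.TableMetaParserImpl;

    import java.util.List;

    public class TableMetaParserSketch {
      // Builds a TableMeta from the raw result rows of the two metadata queries.
      public static TableMeta toTableMeta(String database, String table,
                                          List<Row> createTableRows, List<Row> describeFormattedRows) {
        TableMetaParserImpl parser = Guice.createInjector().getInstance(TableMetaParserImpl.class);
        return parser.parse(database, table, createTableRows, describeFormattedRows);
      }
    }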

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/TableMetaSectionParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/TableMetaSectionParser.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/TableMetaSectionParser.java
new file mode 100644
index 0000000..7d5e170
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/TableMetaSectionParser.java
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal.parsers;
+
+import org.apache.ambari.view.hive20.client.Row;
+
+import java.util.List;
+
+/**
+ * Parses a single section of the DESCRIBE FORMATTED output (for example the storage or partition section).
+ */
+public interface TableMetaSectionParser<T> {
+  T parse(List<Row> rows);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/ViewInfoParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/ViewInfoParser.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/ViewInfoParser.java
new file mode 100644
index 0000000..ba0b069
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/ViewInfoParser.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.internal.parsers;
+
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.internal.dto.ViewInfo;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Parses the view Information from the describe formatted output.
+ */
+public class ViewInfoParser extends AbstractTableMetaParser<ViewInfo> {
+
+  public ViewInfoParser() {
+    super("# View Information", null, "");
+  }
+
+  @Override
+  public ViewInfo parse(List<Row> rows) {
+    ViewInfo info = new ViewInfo();
+    Map<String, Object> parsedSection = parseSection(rows);
+    if(parsedSection.size() == 0) {
+      return null; // View Information is not present
+    }
+    info.setOriginalText(getString(parsedSection, "View Original Text:"));
+    info.setExtendedText(getString(parsedSection, "View Expanded Text:"));
+    return info;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/AlterTableQueryGenerator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/AlterTableQueryGenerator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/AlterTableQueryGenerator.java
new file mode 100644
index 0000000..73f8266
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/AlterTableQueryGenerator.java
@@ -0,0 +1,365 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.ambari.view.hive20.internal.query.generators;
+
+import com.google.common.base.Function;
+import com.google.common.base.Joiner;
+import com.google.common.base.Optional;
+import com.google.common.base.Predicate;
+import com.google.common.base.Strings;
+import com.google.common.collect.FluentIterable;
+import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;
+import org.apache.ambari.view.hive20.internal.dto.ColumnOrder;
+import org.apache.ambari.view.hive20.internal.dto.TableMeta;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.annotation.Nullable;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+import static org.apache.ambari.view.hive20.internal.query.generators.QueryGenerationUtils.isNullOrEmpty;
+
+public class AlterTableQueryGenerator implements QueryGenerator {
+  private static final Logger LOG = LoggerFactory.getLogger(AlterTableQueryGenerator.class);
+
+  private final TableMeta oldMeta;
+  private final TableMeta newMeta;
+
+  public AlterTableQueryGenerator(TableMeta oldMeta, TableMeta newMeta) {
+    this.oldMeta = oldMeta;
+    this.newMeta = newMeta;
+  }
+
+  public TableMeta getOldMeta() {
+    return oldMeta;
+  }
+
+  public TableMeta getNewMeta() {
+    return newMeta;
+  }
+
+  public String getQueryPerfix() {
+    return new StringBuffer(" ALTER TABLE ")
+      .append("`").append(this.getOldMeta().getDatabase()).append(".").append(this.getOldMeta().getTable().trim()).append("` ").toString();
+  }
+
+  public Optional<String> getQuery() {
+    List<Optional<String>> queries = new LinkedList<>();
+
+    // TODO: rename of table name has to be handled separately as other queries depend on new name.
+//    Optional<String> tableRenameQuery = this.generateTableRenameQuery(this.getOldMeta().getDatabase(),
+//      this.getOldMeta().getTable(), this.getNewMeta().getDatabase(), this.getNewMeta().getTable());
+//    queries.add(tableRenameQuery);
+
+    Optional<List<Optional<String>>> columnQuery = this.generateColumnQuery();
+    if (columnQuery.isPresent()) {
+      queries.addAll(columnQuery.get());
+    }
+
+    if (null != this.getOldMeta().getDetailedInfo() && null != this.getNewMeta().getDetailedInfo()) {
+      Optional<String> tablePropertiesQuery = this.generateTablePropertiesQuery(this.getOldMeta().getDetailedInfo().getParameters(),
+        this.getNewMeta().getDetailedInfo().getParameters());
+      queries.add(tablePropertiesQuery);
+    }
+
+    if (null != this.getOldMeta().getStorageInfo() && null != this.getNewMeta().getStorageInfo()) {
+      String oldSerde = this.getOldMeta().getStorageInfo().getSerdeLibrary();
+      String newSerde = this.getNewMeta().getStorageInfo().getSerdeLibrary();
+      Map<String, String> oldParameters = this.getOldMeta().getStorageInfo().getParameters();
+      Map<String, String> newParameters = this.getNewMeta().getStorageInfo().getParameters();
+
+      Optional<String> serdeProperties = this.generateSerdeQuery(oldSerde, oldParameters, newSerde, newParameters);
+      queries.add(serdeProperties);
+    }
+
+    if (null != this.getOldMeta().getStorageInfo() && null != this.getNewMeta().getStorageInfo()) {
+      List<String> oldBucketCols = this.getOldMeta().getStorageInfo().getBucketCols();
+      List<ColumnOrder> oldSortCols = this.getOldMeta().getStorageInfo().getSortCols();
+      String oldNumBuckets = this.getOldMeta().getStorageInfo().getNumBuckets();
+
+      List<String> newBucketCols = this.getNewMeta().getStorageInfo().getBucketCols();
+      List<ColumnOrder> newSortCols = this.getNewMeta().getStorageInfo().getSortCols();
+      String newNumBuckets = this.getNewMeta().getStorageInfo().getNumBuckets();
+
+      Optional<String> storagePropertyQuery = this.generateStoragePropertyQuery(oldBucketCols, oldSortCols, oldNumBuckets, newBucketCols, newSortCols, newNumBuckets);
+      queries.add(storagePropertyQuery);
+    }
+
+
+    List<String> queryList = FluentIterable.from(queries).filter(new Predicate<Optional<String>>() {
+      @Override
+      public boolean apply(Optional<String> input) {
+        return input.isPresent();
+      }
+    }).transform(new Function<Optional<String>, String>() {
+      @Override
+      public String apply(Optional<String> input) {
+          return input.get();
+      }
+    }).toList();
+
+    if (!queryList.isEmpty()) {
+      return Optional.of(Joiner.on(";\n").join(queryList));
+    } else {
+      return Optional.absent();
+    }
+
+  }
+
+  Optional<List<Optional<String>>> generateColumnQuery() {
+    List<ColumnInfo> oldColumns = this.getOldMeta().getColumns();
+    List<ColumnInfo> newColumns = this.getNewMeta().getColumns();
+    boolean cascade = null != this.getNewMeta().getPartitionInfo() && !isNullOrEmpty(this.getNewMeta().getPartitionInfo().getColumns());
+    Optional<List<String>> queries = createColumnQueries(oldColumns, newColumns, cascade);
+    if (queries.isPresent()) {
+      List<Optional<String>> queryList = FluentIterable.from(queries.get()).transform(new Function<String, Optional<String>>() {
+        @Override
+        public Optional<String> apply(String input) {
+          return Optional.of(getQueryPerfix() + input);
+        }
+      }).toList();
+      return Optional.of(queryList);
+    } else {
+      return Optional.absent();
+    }
+  }
+
+  /**
+   * TODO : this uses CASCADE. confirm that it is expected.
+   * ALTER TABLE table_name [PARTITION partition_spec] CHANGE [COLUMN] col_old_name col_new_name column_type
+   * [COMMENT col_comment] [FIRST|AFTER column_name] [CASCADE|RESTRICT];
+   * <p>
+   * ALTER TABLE table_name
+   * [PARTITION partition_spec]                 -- (Note: Hive 0.14.0 and later)
+   * ADD|REPLACE COLUMNS (col_name data_type [COMMENT col_comment], ...)
+   * [CASCADE|RESTRICT]                         -- (Note: Hive 0.15.0 and later)
+   *
+   * @param oldColumns columns currently defined on the table
+   * @param newColumns columns as they should look after the alter
+   * @return the column change clauses, or absent if nothing changed
+   */
+  static Optional<List<String>> createColumnQueries(List<ColumnInfo> oldColumns, List<ColumnInfo> newColumns, boolean cascade) {
+    if (isNullOrEmpty(oldColumns) || isNullOrEmpty(newColumns)) {
+      LOG.error("oldColumns = {} or newColumns = {} was null.", oldColumns, newColumns);
+      throw new IllegalArgumentException("Old or new columns cannot be empty.");
+    }
+
+    //TODO : removing columns not allowed right now. handle this later using REPLACE for native serde or error.
+    if (oldColumns.size() > newColumns.size()) {
+      LOG.error("removing columns from hive table is not supported yet.");
+      throw new IllegalArgumentException("removing columns is not allowed.");
+    }
+
+    List<String> queries = new LinkedList<>();
+    int i = 0;
+    boolean foundChange = false;
+    for (; i < oldColumns.size(); i++) {
+      ColumnInfo oldColumn = oldColumns.get(i);
+      ColumnInfo newColumn = newColumns.get(i);
+
+      if (!oldColumn.equals(newColumn)) {
+        foundChange = true;
+        StringBuilder queryBuilder = new StringBuilder();
+        queryBuilder.append(" CHANGE COLUMN `").append(oldColumn.getName()).append("` ")
+          .append(QueryGenerationUtils.getColumnRepresentation(newColumn));
+
+        if (cascade) {
+          queryBuilder.append(" CASCADE");
+        }
+
+        queries.add(queryBuilder.toString());
+      }
+    }
+
+    if (i < newColumns.size()) {
+      StringBuilder queryBuilder = new StringBuilder();
+      queryBuilder.append(" ADD COLUMNS ( ");
+      boolean first = true;
+      for (; i < newColumns.size(); i++) {
+        foundChange = true;
+        ColumnInfo columnInfo = newColumns.get(i);
+        if (!first) {
+          queryBuilder.append(", ");
+        } else {
+          first = false;
+        }
+
+        queryBuilder.append(QueryGenerationUtils.getColumnRepresentation(columnInfo));
+      }
+      queryBuilder.append(" )");
+
+      if(cascade){
+        queryBuilder.append(" CASCADE");
+      }
+
+      queries.add(queryBuilder.toString());
+    }
+
+    if (foundChange) {
+      return Optional.of(queries);
+    } else {
+      return Optional.absent();
+    }
+  }
+
+  Optional<String> generateStoragePropertyQuery(List<String> oldBucketCols, List<ColumnOrder> oldSortCols, String oldNumBuckets, List<String> newBucketCols, List<ColumnOrder> newSortCols, String newNumBuckets) {
+    Optional<String> query = createStoragePropertyQuery(oldBucketCols, oldSortCols, oldNumBuckets, newBucketCols, newSortCols, newNumBuckets);
+    if (query.isPresent()) return Optional.of(getQueryPerfix() + query.get());
+    else return Optional.absent();
+  }
+
+  /**
+   * ALTER TABLE table_name CLUSTERED BY (col_name, col_name, ...) [SORTED BY (col_name, ...)]
+   * INTO num_buckets BUCKETS;
+   *
+   * @param oldBucketCols existing CLUSTERED BY columns
+   * @param oldSortCols existing SORTED BY columns
+   * @param oldNumBuckets existing number of buckets
+   * @param newBucketCols desired CLUSTERED BY columns
+   * @param newSortCols desired SORTED BY columns
+   * @param newNumBuckets desired number of buckets
+   * @return the CLUSTERED BY / SORTED BY / INTO ... BUCKETS clause, or absent if the table is not clustered
+   */
+  static Optional<String> createStoragePropertyQuery(List<String> oldBucketCols, List<ColumnOrder> oldSortCols, String oldNumBuckets, List<String> newBucketCols, List<ColumnOrder> newSortCols, String newNumBuckets) {
+    StringBuilder queryBuilder = new StringBuilder();
+    boolean foundDiff = false;
+
+    if (isNullOrEmpty(newBucketCols)) {
+      if (!isNullOrEmpty(oldBucketCols)) {
+        // TODO : all cols removed. how to handle this. Ignoring
+        LOG.error("cannot handle removal of all the columns from buckets.");
+        throw new IllegalArgumentException("removing all columns from CLUSTERED BY not allowed.");
+      } else {
+        // NOTHING ADDED to CLUSTERED BY.
+        return Optional.absent();
+      }
+    } else {
+      queryBuilder.append(" CLUSTERED BY ( ").append(Joiner.on(",").join(newBucketCols)).append(" ) ");
+    }
+
+    if (!isNullOrEmpty(newSortCols)) {
+      queryBuilder.append(" SORTED BY ( ")
+        .append(Joiner.on(",").join(FluentIterable.from(newSortCols).transform(new Function<ColumnOrder, String>() {
+          @Nullable
+          @Override
+          public String apply(@Nullable ColumnOrder input) {
+            return input.getColumnName() + " " + input.getOrder().name();
+          }
+        })))
+        .append(" ) ");
+    }
+
+    if (Strings.isNullOrEmpty(newNumBuckets)) {
+      LOG.error("Number of buckets cannot be empty if CLUSTERED BY is mentioned.");
+      throw new IllegalArgumentException("Number of buckets cannot be empty.");
+    } else {
+      queryBuilder.append(" INTO ").append(newNumBuckets).append(" BUCKETS ");
+    }
+
+    return Optional.of(queryBuilder.toString());
+  }
+
+  Optional<String> generateSerdeQuery(String oldSerde, Map<String, String> oldParameters, String newSerde, Map<String, String> newParameters) {
+    Optional<String> query = createSerdeQuery(oldSerde, oldParameters, newSerde, newParameters);
+    if (query.isPresent()) return Optional.of(getQueryPerfix() + query.get());
+    else return Optional.absent();
+  }
+
+  /**
+   * assuming that getStorageInfo().getParameters() gives only serde properties
+   *
+   * @return
+   */
+  static Optional<String> createSerdeQuery(String oldSerde, Map<String, String> oldParameters, String newSerde, Map<String, String> newParameters) {
+    String query = "";
+    boolean serdeChanged = false;
+    if (null != newSerde) {
+      serdeChanged = !newSerde.equals(oldSerde);
+      query += " SET SERDE " + newSerde + " ";
+    }
+    Optional<Map<String, Map<Object, Object>>> diff = QueryGenerationUtils.findDiff(oldParameters, newParameters);
+    if (diff.isPresent()) {
+      Map<String, Map<Object, Object>> diffMap = diff.get();
+      Map<Object, Object> added = diffMap.get(QueryGenerationUtils.ADDED);
+      Map<Object, Object> modified = diffMap.get(QueryGenerationUtils.MODIFIED);
+      Map<Object, Object> deleted = diffMap.get(QueryGenerationUtils.DELETED);
+
+      // TODO : how to handle deleted properties? Hive ALTER TABLE does not offer a way to remove an existing serde property.
+      Map<Object, Object> addedOrModified = new HashMap<>(added);
+      addedOrModified.putAll(modified);
+
+      if (serdeChanged) {
+        query += " WITH SERDEPROPERTIES ";
+      } else {
+        query += " SET SERDEPROPERTIES ";
+      }
+      query += " ( " + QueryGenerationUtils.getPropertiesAsKeyValues(addedOrModified) + " ) ";
+    }
+
+    if (!query.trim().isEmpty()) {
+      return Optional.of(query);
+    }
+
+    return Optional.absent();
+  }
+
+  Optional<String> generateTablePropertiesQuery(Map oldProps, Map newProps) {
+    Optional<String> query = createTablePropertiesQuery(oldProps, newProps);
+    if (query.isPresent()) return Optional.of(getQueryPerfix() + query.get());
+    else return Optional.absent();
+  }
+
+
+  static Optional<String> createTablePropertiesQuery(Map oldProps, Map newProps) {
+    if (null == newProps) {
+      newProps = new HashMap();
+    }
+    // TODO : ignore system generated table properties during comparison
+    if (!QueryGenerationUtils.isEqual(oldProps, newProps)) {
+      return Optional.of(" SET TBLPROPERTIES (" + QueryGenerationUtils.getPropertiesAsKeyValues(newProps) + ")");
+    }
+
+    return Optional.absent();
+  }
+
+  Optional<String> generateTableRenameQuery(String oldDatabaseName, String oldTableName, String newDatabaseName, String newTableName) {
+    Optional<String> query = createTableRenameQuery(oldDatabaseName, oldTableName, newDatabaseName, newTableName);
+    if (query.isPresent()) return Optional.of(getQueryPerfix() + query.get());
+    else return Optional.absent();
+  }
+
+  static Optional<String> createTableRenameQuery(String oldDatabaseName, String oldTableName, String newDatabaseName, String newTableName) {
+    if (Strings.isNullOrEmpty(oldTableName) || Strings.isNullOrEmpty(newTableName)) {
+      LOG.error("oldTableName or newTableName is empty : {}, {} ", oldTableName, newTableName);
+      throw new IllegalArgumentException("oldTableName and newTableName both should be non empty.");
+    }
+
+    String oldName = (null != oldDatabaseName ? oldDatabaseName.trim() + "." : "") + oldTableName.trim();
+    String newName = (null != newDatabaseName ? newDatabaseName.trim() + "." : "") + newTableName.trim();
+
+    if (!oldName.equals(newName)) {
+      return Optional.of(" RENAME TO " + newName);
+    }
+
+    return Optional.absent();
+  }
+}
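
A small sketch of how the generator above might be driven (illustrative only; `diffToDdl` is a made-up helper name): the old metadata would typically come from TableMetaParserImpl and the new one from the table model edited in the UI.

    import com.google.common.base.Optional;
    import org.apache.ambari.view.hive20.internal.dto.TableMeta;
    import org.apache.ambari.view.hive20.internal.query.generators.AlterTableQueryGenerator;

    public class AlterTableSketch {
      // Returns the ALTER TABLE DDL needed to move from oldMeta to newMeta, or null when nothing changed.
      public static String diffToDdl(TableMeta oldMeta, TableMeta newMeta) {
        Optional<String> query = new AlterTableQueryGenerator(oldMeta, newMeta).getQuery();
        // The generator joins the individual ALTER statements with ";\n".
        return query.isPresent() ? query.get() : null;
      }
    }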

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/CreateTableQueryGenerator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/CreateTableQueryGenerator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/CreateTableQueryGenerator.java
new file mode 100644
index 0000000..eab3a4b
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/CreateTableQueryGenerator.java
@@ -0,0 +1,165 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.ambari.view.hive20.internal.query.generators;
+
+import com.google.common.base.Function;
+import com.google.common.base.Joiner;
+import com.google.common.base.Optional;
+import com.google.common.base.Strings;
+import com.google.common.collect.FluentIterable;
+import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;
+import org.apache.ambari.view.hive20.internal.dto.ColumnOrder;
+import org.apache.ambari.view.hive20.internal.dto.TableMeta;
+
+import javax.annotation.Nullable;
+import java.util.List;
+import java.util.Map;
+
+public class CreateTableQueryGenerator implements QueryGenerator {
+  private static final String COMMENT = "COMMENT";
+  public static final String ESCAPE_DELIM = "escape.delim";
+  public static final String FIELD_DELIM = "field.delim";
+  public static final String COLELCTION_DELIM = "colelction.delim"; // matches Hive's historically misspelled collection delimiter property key
+  public static final String MAPKEY_DELIM = "mapkey.delim";
+  public static final String LINE_DELIM = "line.delim";
+  public static final String SERIALIZATION_NULL_FORMAT = "serialization.null.format";
+  private TableMeta tableMeta;
+  public CreateTableQueryGenerator(TableMeta tableMeta) {
+    this.tableMeta = tableMeta;
+  }
+
+  @Override
+  public Optional<String> getQuery(){
+    StringBuffer query = new StringBuffer();
+    query.append("CREATE TABLE ");
+    query.append(tableMeta.getDatabase()).append(".");
+    query.append(tableMeta.getTable()).append(" ");
+    query.append("(").append(getColumnQuery(tableMeta.getColumns())).append(") ");
+    if(null != tableMeta.getDetailedInfo() && null != tableMeta.getDetailedInfo().getParameters()){
+      String tableComment = tableMeta.getDetailedInfo().getParameters().get(COMMENT);
+      if(!Strings.isNullOrEmpty(tableComment)){
+        query.append(" COMMENT ").append(tableComment);
+      }
+    }
+    if(null != tableMeta.getPartitionInfo() ) {
+      if (tableMeta.getPartitionInfo().getColumns() != null && !tableMeta.getPartitionInfo().getColumns().isEmpty()) {
+        query.append(" PARTITIONED BY ( ").append(getColumnQuery(tableMeta.getPartitionInfo().getColumns())).append(")");
+      }
+    }
+    if(null != tableMeta.getStorageInfo()) {
+      if (!QueryGenerationUtils.isNullOrEmpty(tableMeta.getStorageInfo().getBucketCols())) {
+        query.append(" CLUSTERED BY (").append(Joiner.on(",").join(tableMeta.getStorageInfo().getBucketCols())).append(")");
+      }
+      if (!QueryGenerationUtils.isNullOrEmpty(tableMeta.getStorageInfo().getSortCols())) {
+        query.append(" SORTED BY (").append(getSortColQuery(tableMeta.getStorageInfo().getSortCols())).append(")");
+      }
+      if (!Strings.isNullOrEmpty(tableMeta.getStorageInfo().getNumBuckets())) {
+        query.append(" INTO ").append(tableMeta.getStorageInfo().getNumBuckets()).append(" BUCKETS ");
+      }
+      // TODO : Skewed information not available right now.
+
+      if(!isNullOrEmpty(tableMeta.getStorageInfo().getParameters())) {
+        if (!Strings.isNullOrEmpty(tableMeta.getStorageInfo().getParameters().get(ESCAPE_DELIM)) ||
+          !Strings.isNullOrEmpty(tableMeta.getStorageInfo().getParameters().get(FIELD_DELIM)) ||
+          !Strings.isNullOrEmpty(tableMeta.getStorageInfo().getParameters().get(COLELCTION_DELIM)) ||
+          !Strings.isNullOrEmpty(tableMeta.getStorageInfo().getParameters().get(MAPKEY_DELIM)) ||
+          !Strings.isNullOrEmpty(tableMeta.getStorageInfo().getParameters().get(LINE_DELIM)) ||
+          !Strings.isNullOrEmpty(tableMeta.getStorageInfo().getParameters().get(SERIALIZATION_NULL_FORMAT))
+          ) {
+          query.append(" ROW FORMAT DELIMITED ");
+          if (!Strings.isNullOrEmpty(tableMeta.getStorageInfo().getParameters().get(FIELD_DELIM))) {
+            query.append(" FIELDS TERMINATED BY '").append(tableMeta.getStorageInfo().getParameters().get(FIELD_DELIM)).append("'");
+          }
+          if (!Strings.isNullOrEmpty(tableMeta.getStorageInfo().getParameters().get(ESCAPE_DELIM))) {
+            query.append(" ESCAPED BY '").append(tableMeta.getStorageInfo().getParameters().get(ESCAPE_DELIM)).append("'");
+          }
+          if (!Strings.isNullOrEmpty(tableMeta.getStorageInfo().getParameters().get(COLELCTION_DELIM))) {
+            query.append(" COLLECTION ITEMS TERMINATED BY '").append(tableMeta.getStorageInfo().getParameters().get(COLELCTION_DELIM)).append("'");
+          }
+          if (!Strings.isNullOrEmpty(tableMeta.getStorageInfo().getParameters().get(MAPKEY_DELIM))) {
+            query.append(" MAP KEYS TERMINATED BY '").append(tableMeta.getStorageInfo().getParameters().get(MAPKEY_DELIM)).append("'");
+          }
+          if (!Strings.isNullOrEmpty(tableMeta.getStorageInfo().getParameters().get(LINE_DELIM))) {
+            query.append(" LINES TERMINATED BY '").append(tableMeta.getStorageInfo().getParameters().get(LINE_DELIM)).append("'");
+          }
+          if (!Strings.isNullOrEmpty(tableMeta.getStorageInfo().getParameters().get(SERIALIZATION_NULL_FORMAT))) {
+            query.append(" NULL DEFINED AS '").append(tableMeta.getStorageInfo().getParameters().get(SERIALIZATION_NULL_FORMAT)).append("'");
+          }
+        }
+      }
+
+      // STORED AS file_format
+      if(!Strings.isNullOrEmpty(tableMeta.getStorageInfo().getFileFormat()) && !tableMeta.getStorageInfo().getFileFormat().trim().isEmpty()){
+        query.append(" STORED AS ").append(tableMeta.getStorageInfo().getFileFormat().trim());
+      }else if (!Strings.isNullOrEmpty(tableMeta.getStorageInfo().getInputFormat()) ||
+        !Strings.isNullOrEmpty(tableMeta.getStorageInfo().getOutputFormat())
+        ) {
+        query.append(" STORED AS ");
+        if (!Strings.isNullOrEmpty(tableMeta.getStorageInfo().getInputFormat())) {
+          query.append(" INPUTFORMAT '").append(tableMeta.getStorageInfo().getInputFormat()).append("'");
+        }
+        if (!Strings.isNullOrEmpty(tableMeta.getStorageInfo().getOutputFormat())) {
+          query.append(" OUTPUTFORMAT '").append(tableMeta.getStorageInfo().getOutputFormat()).append("'");
+        }
+      }
+    }
+
+    if(null != tableMeta.getDetailedInfo()) {
+      if (!Strings.isNullOrEmpty(tableMeta.getDetailedInfo().getLocation())) {
+        query.append(" LOCATION '").append(tableMeta.getDetailedInfo().getLocation()).append("'");
+      }
+
+      if (!QueryGenerationUtils.isNullOrEmpty(tableMeta.getDetailedInfo().getParameters())) {
+        String props = QueryGenerationUtils.getPropertiesAsKeyValues(tableMeta.getDetailedInfo().getParameters());
+
+        query.append(" TBLPROPERTIES (").append(props).append(")");
+      }
+    }
+
+    return Optional.of(query.toString());
+  }
+
+  private boolean isNullOrEmpty(Map map) {
+    return null == map || map.isEmpty();
+  }
+
+  private String getSortColQuery(List<ColumnOrder> sortCols) {
+    List<String> sortColsList = FluentIterable.from(sortCols).transform(new Function<ColumnOrder, String>() {
+      @Nullable
+      @Override
+      public String apply(@Nullable ColumnOrder input) {
+        return input.getColumnName() + " " + input.getOrder().name();
+      }
+    }).toList();
+    return Joiner.on(",").join(sortColsList);
+  }
+
+  private String getColumnQuery(List<ColumnInfo> columns) {
+    List<String> columnQuery = FluentIterable.from(columns).transform(new Function<ColumnInfo, String>() {
+      @Nullable
+      @Override
+      public String apply(@Nullable ColumnInfo column) {
+        return QueryGenerationUtils.getColumnRepresentation(column);
+      }
+    }).toList();
+
+    return Joiner.on(",").join(columnQuery);
+  }
+
+}
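
Similarly, a minimal sketch for the CREATE path (illustrative; it assumes the TableMeta was populated for a new table, for example by the UI):

    import org.apache.ambari.view.hive20.internal.dto.TableMeta;
    import org.apache.ambari.view.hive20.internal.query.generators.CreateTableQueryGenerator;

    public class CreateTableSketch {
      public static String toDdl(TableMeta meta) {
        // getQuery() above always returns a present Optional for the CREATE case.
        return new CreateTableQueryGenerator(meta).getQuery().get();
      }
    }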

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/DeleteTableQueryGenerator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/DeleteTableQueryGenerator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/DeleteTableQueryGenerator.java
new file mode 100644
index 0000000..09b12b6
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/DeleteTableQueryGenerator.java
@@ -0,0 +1,67 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.ambari.view.hive20.internal.query.generators;
+
+import com.google.common.base.Optional;
+import org.apache.ambari.view.hive20.exceptions.ServiceException;
+import org.apache.parquet.Strings;
+
+public class DeleteTableQueryGenerator implements QueryGenerator {
+  private final String databaseName;
+  private final String tableName;
+  private Boolean purge = Boolean.FALSE;
+
+  public DeleteTableQueryGenerator(String databaseName, String tableName) {
+    this(databaseName, tableName, Boolean.FALSE);
+  }
+
+  public DeleteTableQueryGenerator(String databaseName, String tableName, Boolean purge) {
+    this.databaseName = databaseName;
+    this.tableName = tableName;
+    if( null != purge ) this.purge = purge;
+  }
+
+  public String getDatabaseName() {
+    return databaseName;
+  }
+
+  public String getTableName() {
+    return tableName;
+  }
+
+  public Boolean getPurge() {
+    return purge;
+  }
+
+  public void setPurge(Boolean purge) {
+    this.purge = purge;
+  }
+
+  /**
+   * @return the DROP TABLE statement for the configured table
+   * @throws ServiceException if the database name or table name is null or empty
+   */
+  @Override
+  public Optional<String> getQuery() throws ServiceException {
+    if(Strings.isNullOrEmpty(this.getDatabaseName()) || Strings.isNullOrEmpty(this.getTableName()))
+      throw new ServiceException("databaseName or tableName was null.");
+
+    return Optional.of("DROP TABLE `" + databaseName + "`.`" + tableName + "`" + (this.getPurge() ? " PURGE " : ""));
+  }
+}
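
The DROP path is straightforward; a short illustration of the statement the generator produces for a purge delete:

    import org.apache.ambari.view.hive20.exceptions.ServiceException;
    import org.apache.ambari.view.hive20.internal.query.generators.DeleteTableQueryGenerator;

    public class DropTableSketch {
      public static void main(String[] args) throws ServiceException {
        // Prints: DROP TABLE `default`.`test` PURGE
        System.out.println(new DeleteTableQueryGenerator("default", "test", true).getQuery().get());
      }
    }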

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/QueryGenerationUtils.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/QueryGenerationUtils.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/QueryGenerationUtils.java
new file mode 100644
index 0000000..d9dc6e1
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/QueryGenerationUtils.java
@@ -0,0 +1,151 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.ambari.view.hive20.internal.query.generators;
+
+import com.google.common.base.Function;
+import com.google.common.base.Joiner;
+import com.google.common.base.Optional;
+import com.google.common.base.Strings;
+import com.google.common.collect.FluentIterable;
+import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;
+
+import javax.annotation.Nullable;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+public class QueryGenerationUtils {
+
+  public static final String ADDED = "ADDED";
+  public static final String DELETED = "DELETED";
+  public static final String MODIFIED = "MODIFIED";
+
+  public static boolean isNullOrEmpty(Map map) {
+    return null == map || map.isEmpty();
+  }
+
+  public static boolean isNullOrEmpty(Collection collection) {
+    return null == collection || collection.isEmpty();
+  }
+
+  public static boolean isEqual(Map oldProps, Map newProps) {
+    if(oldProps == null && newProps == null) return true;
+
+    if(oldProps != null && newProps != null){
+      if(oldProps.size() != newProps.size()) return false;
+
+      Set<Map.Entry> entrySet = oldProps.entrySet();
+      for(Map.Entry e : entrySet){
+        Object key = e.getKey();
+        if(oldProps.get(key) == null){
+          if(newProps.get(key) != null) return false;
+        }else {
+          if (newProps.get(key) == null || !newProps.get(key).equals(oldProps.get(key))) {
+            return false;
+          }
+        }
+      }
+    }
+
+    return true;
+  }
+
+  /**
+   * Returns a map with the three keys "ADDED", "DELETED" and "MODIFIED" describing the difference between oldProps and newProps.
+   * For "ADDED" and "MODIFIED" the values in the returned map are taken from newProps.
+   * @param oldProps the original properties
+   * @param newProps the desired properties
+   * @return the diff grouped by change type
+   */
+  public static Optional<Map<String, Map<Object,Object>>> findDiff(Map oldProps, Map newProps) {
+    Map<String, Map<Object, Object>> ret = new HashMap<>();
+    Map<Object, Object> added = new HashMap<>();
+    Map<Object, Object> modified = new HashMap<>();
+    Map<Object, Object> deleted = new HashMap<>();
+
+    if(oldProps == null && newProps == null) return Optional.of(ret);
+
+    if(oldProps != null && newProps != null){
+      Set<Map.Entry> entrySet = oldProps.entrySet();
+      for(Map.Entry e : entrySet){
+        Object key = e.getKey();
+        Object newValue = newProps.get(key);
+        if(e.getValue() == null){
+          if( newValue != null){
+            added.put(key, newValue);
+          }
+        }else {
+          if (newValue == null) {
+            deleted.put(key, e.getValue());
+          }else if (!e.getValue().equals(newValue)){
+            modified.put(key, newValue);
+          }
+        }
+      }
+
+      Set<Map.Entry> newEntrySet = newProps.entrySet();
+      for(Map.Entry e : newEntrySet){
+        if(e.getValue() != null && oldProps.get(e.getKey()) == null){
+          added.put(e.getKey(), e.getValue());
+        }
+      }
+    }
+    ret.put(ADDED, added);
+    ret.put(DELETED, deleted);
+    ret.put(MODIFIED, modified);
+
+    return Optional.of(ret);
+  }
+
+  public static String getPropertiesAsKeyValues(Map<String, String> parameters) {
+    List<String> props = (List<String>) FluentIterable.from(parameters.entrySet())
+            .transform(new Function<Map.Entry<String, String>, String>() {
+              @Nullable
+              @Override
+              public String apply(@Nullable Map.Entry<String, String> entry) {
+                return "'" + entry.getKey() + "'='" + entry.getValue() + "'";
+              }
+            }).toList();
+
+    return Joiner.on(",").join(props);
+  }
+
+  public static String getColumnRepresentation(ColumnInfo column) {
+    StringBuilder colQuery = new StringBuilder().append("`").append(column.getName()).append("`");
+    colQuery.append(" ").append(column.getType());
+    if(!QueryGenerationUtils.isNullOrZero(column.getPrecision())){
+      if(!QueryGenerationUtils.isNullOrZero(column.getScale())){
+        colQuery.append("(").append(column.getPrecision()).append(",").append(column.getScale()).append(")");
+      }else{
+        colQuery.append("(").append(column.getPrecision()).append(")");
+      }
+    }
+    if(!Strings.isNullOrEmpty(column.getComment())) {
+      colQuery.append(" COMMENT '").append(column.getComment()).append("'");
+    }
+
+    return colQuery.toString();
+  }
+
+  public static boolean isNullOrZero(Integer integer) {
+    return null == integer || 0 == integer;
+  }
+}
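
A tiny illustration of findDiff (illustrative; the property values are arbitrary). The result groups entries under ADDED, MODIFIED and DELETED, with the ADDED and MODIFIED values taken from the new map:

    import com.google.common.collect.ImmutableMap;
    import org.apache.ambari.view.hive20.internal.query.generators.QueryGenerationUtils;

    import java.util.Map;

    public class FindDiffSketch {
      public static void main(String[] args) {
        Map<String, String> oldProps = ImmutableMap.of("field.delim", ",");
        Map<String, String> newProps = ImmutableMap.of("field.delim", "\t", "serialization.null.format", "NULL");
        // ADDED    -> {serialization.null.format=NULL}
        // MODIFIED -> {field.delim=<tab>}
        // DELETED  -> {}
        System.out.println(QueryGenerationUtils.findDiff(oldProps, newProps).get());
      }
    }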

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/QueryGenerator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/QueryGenerator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/QueryGenerator.java
new file mode 100644
index 0000000..0d8f350
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/QueryGenerator.java
@@ -0,0 +1,26 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.ambari.view.hive20.internal.query.generators;
+
+import com.google.common.base.Optional;
+import org.apache.ambari.view.hive20.exceptions.ServiceException;
+
+public interface QueryGenerator {
+  Optional<String> getQuery() throws ServiceException;
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/DataStoreStorage.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/DataStoreStorage.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/DataStoreStorage.java
new file mode 100644
index 0000000..133ff08
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/DataStoreStorage.java
@@ -0,0 +1,140 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.persistence;
+
+import org.apache.ambari.view.PersistenceException;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.persistence.utils.FilteringStrategy;
+import org.apache.ambari.view.hive20.persistence.utils.Indexed;
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive20.persistence.utils.OnlyOwnersFilteringStrategy;
+import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
+import org.apache.commons.beanutils.BeanUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.beans.Transient;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * Engine for storing objects to context DataStore storage
+ */
+public class DataStoreStorage implements Storage {
+  private final static Logger LOG =
+      LoggerFactory.getLogger(DataStoreStorage.class);
+
+  protected ViewContext context;
+
+  /**
+   * Constructor
+   * @param context View Context instance
+   */
+  public DataStoreStorage(ViewContext context) {
+    this.context = context;
+  }
+
+  @Override
+  public synchronized void store(Class model, Indexed obj) {
+
+    try {
+      Indexed newBean = (Indexed) BeanUtils.cloneBean(obj);
+      preprocessEntity(newBean);
+      context.getDataStore().store(newBean);
+      obj.setId(newBean.getId());
+    } catch (Exception e) {
+      throw new ServiceFormattedException("S020 Data storage error", e);
+    }
+  }
+
+  private void preprocessEntity(Indexed obj) {
+    cleanTransientFields(obj);
+  }
+
+  private void cleanTransientFields(Indexed obj) {
+    for (Method m : obj.getClass().getMethods()) {
+      Transient aTransient = m.getAnnotation(Transient.class);
+      if (aTransient != null && m.getName().startsWith("set")) {
+        try {
+          m.invoke(obj, new Object[]{ null });
+        } catch (IllegalAccessException e) {
+          throw new ServiceFormattedException("S030 Data storage error", e);
+        } catch (InvocationTargetException e) {
+          throw new ServiceFormattedException("S030 Data storage error", e);
+        }
+      }
+    }
+  }
+
+  @Override
+  public synchronized <T extends Indexed> T load(Class<T> model, Object id) throws ItemNotFound {
+    LOG.debug(String.format("Loading %s #%s", model.getName(), id));
+    try {
+      T obj = context.getDataStore().find(model, id);
+      if (obj != null) {
+        return obj;
+      } else {
+        throw new ItemNotFound();
+      }
+    } catch (PersistenceException e) {
+      throw new ServiceFormattedException("S040 Data storage error", e);
+    }
+  }
+
+  @Override
+  public synchronized <T extends Indexed> List<T> loadAll(Class<? extends T> model, FilteringStrategy filter) {
+    LinkedList<T> list = new LinkedList<T>();
+    LOG.debug(String.format("Loading all %s-s", model.getName()));
+    try {
+      for(T item: context.getDataStore().findAll(model, filter.whereStatement())) {
+        list.add(item);
+      }
+    } catch (PersistenceException e) {
+      throw new ServiceFormattedException("S050 Data storage error", e);
+    }
+    return list;
+  }
+
+  @Override
+  public synchronized <T extends Indexed> List<T> loadAll(Class<T> model) {
+    return loadAll(model, new OnlyOwnersFilteringStrategy(this.context.getUsername()));
+  }
+
+  @Override
+  public synchronized void delete(Class model, Object id) throws ItemNotFound {
+    LOG.debug(String.format("Deleting %s:%s", model.getName(), id));
+    Object obj = load(model, id);
+    try {
+      context.getDataStore().remove(obj);
+    } catch (PersistenceException e) {
+      throw new ServiceFormattedException("S060 Data storage error", e);
+    }
+  }
+
+  @Override
+  public boolean exists(Class model, Object id) {
+    try {
+      return context.getDataStore().find(model, id) != null;
+    } catch (PersistenceException e) {
+      throw new ServiceFormattedException("S070 Data storage error", e);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/IStorageFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/IStorageFactory.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/IStorageFactory.java
new file mode 100644
index 0000000..eaad7ba
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/IStorageFactory.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.persistence;
+
+public interface IStorageFactory {
+  Storage getStorage();
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/InstanceKeyValueStorage.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/InstanceKeyValueStorage.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/InstanceKeyValueStorage.java
new file mode 100644
index 0000000..f02b35d
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/InstanceKeyValueStorage.java
@@ -0,0 +1,132 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.persistence;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.persistence.utils.ContextConfigurationAdapter;
+import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
+import org.apache.commons.configuration.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.ws.rs.WebApplicationException;
+
+
+/**
+ * Persistent storage engine for storing java beans to
+ * instance data
+ */
+@Deprecated
+public class InstanceKeyValueStorage extends KeyValueStorage {
+  private final static Logger LOG =
+      LoggerFactory.getLogger(InstanceKeyValueStorage.class);
+
+  private ContextConfigurationAdapter config = null;
+  private static final int VALUE_LENGTH_LIMIT = 254;
+
+  /**
+   * Constructor.
+   * @param context View Context instance
+   */
+  public InstanceKeyValueStorage(ViewContext context) {
+    super(context);
+  }
+
+  /**
+   * Returns config instance, adapter to Persistence API
+   * @return config instance
+   */
+  @Override
+  protected synchronized Configuration getConfig() {
+    if (config == null) {
+      config = new ContextConfigurationAdapter(context);
+    }
+    return config;
+  }
+
+  /**
+   * Instance data values are limited in length, so this code splits the value into chunks of at most VALUE_LENGTH_LIMIT characters and saves them as <key>#<chunk_id>
+   * @param modelPropName key
+   * @param json value
+   */
+  protected void write(String modelPropName, String json) {
+    int saved = 0;
+    int page = 1;
+    while (saved < json.length()) {
+      int end = Math.min(saved + VALUE_LENGTH_LIMIT, json.length());
+      String substring = json.substring(saved, end);
+      getConfig().setProperty(modelPropName + "#" + page, substring);
+      saved += VALUE_LENGTH_LIMIT;
+      page += 1;
+      LOG.debug("Chunk saved: " + modelPropName + "#" + page + "=" + substring);
+    }
+    getConfig().setProperty(modelPropName, page - 1);
+    LOG.debug("Write finished: " + modelPropName + " pages:" + (page - 1));
+  }
+
+  /**
+   * Read chunked value (keys format <key>#<chunk_id>)
+   * @param modelPropName key
+   * @return value
+   */
+  protected String read(String modelPropName) {
+    StringBuilder result = new StringBuilder();
+    int pages = getConfig().getInt(modelPropName);
+    LOG.debug("Read started: " + modelPropName + " pages:" + pages);
+
+    for(int page = 1; page <= pages; page++) {
+      String substring = getConfig().getString(modelPropName + "#" + page);
+      LOG.debug("Chunk read: " + modelPropName + "#" + page + "=" + substring);
+      if (substring != null) {
+        result.append(substring);
+      }
+    }
+
+    return result.toString();
+  }
+
+  /**
+   * Remove chunked value (keys format <key>#<chunk_id>)
+   * @param modelPropName key
+   */
+  protected void clear(String modelPropName) {
+    int pages = getConfig().getInt(modelPropName);
+    LOG.debug("Clean started: " + modelPropName + " pages:" + pages);
+
+    for(int page = 1; page <= pages; page++) {
+      getConfig().clearProperty(modelPropName + "#" + page);
+      LOG.debug("Chunk clean: " + modelPropName + "#" + page);
+    }
+    getConfig().clearProperty(modelPropName);
+  }
+
+  public static void storageSmokeTest(ViewContext context) {
+    try {
+      final String property = "test.smoke.property";
+      context.putInstanceData(property, "42");
+      boolean status = context.getInstanceData(property).equals("42");
+      context.removeInstanceData(property);
+      if (!status) throw new ServiceFormattedException("Ambari Views instance data DB doesn't work properly", null);
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+}
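
For illustration (not part of the patch), the chunking done by write() can be mirrored against a plain map to see the resulting key layout; the 254-character limit and key names below simply restate what the code above does.

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class ChunkingSketch {
      public static void main(String[] args) {
        final int limit = 254;                                   // mirrors VALUE_LENGTH_LIMIT
        String key = "savedQuery.7";                             // hypothetical model property name
        String json = new String(new char[600]).replace('\0', 'x');
        Map<String, Object> props = new LinkedHashMap<>();
        int saved = 0, page = 1;
        while (saved < json.length()) {
          int end = Math.min(saved + limit, json.length());
          props.put(key + "#" + page, json.substring(saved, end));
          saved = end;
          page++;
        }
        props.put(key, page - 1);                                // chunk count used by read()/clear()
        System.out.println(props.keySet());                      // [savedQuery.7#1, savedQuery.7#2, savedQuery.7#3, savedQuery.7]
      }
    }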

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/KeyValueStorage.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/KeyValueStorage.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/KeyValueStorage.java
new file mode 100644
index 0000000..e3ed2b4
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/KeyValueStorage.java
@@ -0,0 +1,163 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.persistence;
+
+import com.google.gson.Gson;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.persistence.utils.FilteringStrategy;
+import org.apache.ambari.view.hive20.persistence.utils.Indexed;
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive20.persistence.utils.OnlyOwnersFilteringStrategy;
+import org.apache.commons.configuration.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Engine for storing objects to key-value storage
+ */
+public abstract class KeyValueStorage implements Storage {
+  private final static Logger LOG =
+      LoggerFactory.getLogger(KeyValueStorage.class);
+  protected final Gson gson = new Gson();
+  protected ViewContext context;
+
+  /**
+   * Constructor
+   * @param context View Context instance
+   */
+  public KeyValueStorage(ViewContext context) {
+    this.context = context;
+  }
+
+  /**
+   * Returns config instance, adapter to Persistence API
+   * @return config instance
+   */
+  protected abstract Configuration getConfig();
+
+  @Override
+  public <T extends Indexed> void store(Class<T> model, Indexed obj) {
+    String modelIndexingPropName = getIndexPropertyName(model);
+
+    if (obj.getId() == null) {
+      int lastIndex = getConfig().getInt(modelIndexingPropName, 0);
+      lastIndex ++;
+      getConfig().setProperty(modelIndexingPropName, lastIndex);
+      obj.setId(String.valueOf(lastIndex));
+    }
+
+    String modelPropName = getItemPropertyName(model, obj.getId());
+    String json = serialize(obj);
+    write(modelPropName, json);
+  }
+
+  @Override
+  public <T extends Indexed> T load(Class<T> model, Object id) throws ItemNotFound {
+    String modelPropName = getItemPropertyName(model, id);
+    LOG.debug(String.format("Loading %s", modelPropName));
+    if (getConfig().containsKey(modelPropName)) {
+      String json = read(modelPropName);
+      LOG.debug(String.format("json: %s", json));
+
+      return deserialize(model, json);
+    } else {
+      throw new ItemNotFound();
+    }
+  }
+
+  /**
+   * Write json to storage
+   * @param modelPropName key
+   * @param json value
+   */
+  protected void write(String modelPropName, String json) {
+    getConfig().setProperty(modelPropName, json);
+  }
+
+  /**
+   * Read json from storage
+   * @param modelPropName key
+   * @return value
+   */
+  protected String read(String modelPropName) {
+    return getConfig().getString(modelPropName);
+  }
+
+  /**
+   * Remove line from storage
+   * @param modelPropName key
+   */
+  protected void clear(String modelPropName) {
+    getConfig().clearProperty(modelPropName);
+  }
+
+  protected String serialize(Indexed obj) {
+    return gson.toJson(obj);
+  }
+
+  protected <T extends Indexed> T deserialize(Class<T> model, String json) {
+    return gson.fromJson(json, model);
+  }
+
+  @Override
+  public synchronized <T extends Indexed> List<T> loadAll(Class<? extends T> model, FilteringStrategy filter) {
+    ArrayList<T> list = new ArrayList<T>();
+    String modelIndexingPropName = getIndexPropertyName(model);
+    LOG.debug(String.format("Loading all %s-s", model.getName()));
+    int lastIndex = getConfig().getInt(modelIndexingPropName, 0);
+    for(int i=1; i<=lastIndex; i++) {
+      try {
+        T item = load(model, i);
+        if ((filter == null) || filter.isConform(item)) {
+          list.add(item);
+        }
+      } catch (ItemNotFound ignored) {
+      }
+    }
+    return list;
+  }
+
+  @Override
+  public synchronized <T extends Indexed> List<T> loadAll(Class<T> model) {
+    return loadAll(model, new OnlyOwnersFilteringStrategy(this.context.getUsername()));
+  }
+
+  @Override
+  public synchronized void delete(Class model, Object id) {
+    LOG.debug(String.format("Deleting %s:%s", model.getName(), id));
+    String modelPropName = getItemPropertyName(model, id);
+    clear(modelPropName);
+  }
+
+  @Override
+  public boolean exists(Class model, Object id) {
+    return getConfig().containsKey(getItemPropertyName(model, id));
+  }
+
+  private String getIndexPropertyName(Class model) {
+    return String.format("%s:index", model.getName());
+  }
+
+  private String getItemPropertyName(Class model, Object id) {
+    return String.format("%s.%s", model.getName(), id);
+  }
+}
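
For context, a minimal usage sketch of the storage engine above. SavedNote is a hypothetical
Indexed bean (an id plus payload fields), and the Storage instance would normally come from a
storage factory rather than be constructed directly; both are illustrative, not part of the patch.

  // Sketch: store a bean, reload it by id, and list everything of that type.
  void saveAndReload(Storage storage) throws ItemNotFound {
    SavedNote note = new SavedNote();
    note.setText("SELECT 1");

    storage.store(SavedNote.class, note);                          // id auto-assigned from the "<class>:index" counter
    SavedNote reloaded = storage.load(SavedNote.class, note.getId());
    List<SavedNote> all = storage.loadAll(SavedNote.class, null);  // a null filter returns every stored item
  }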

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/LocalKeyValueStorage.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/LocalKeyValueStorage.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/LocalKeyValueStorage.java
new file mode 100644
index 0000000..9aee9d2
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/LocalKeyValueStorage.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.persistence;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.utils.MisconfigurationFormattedException;
+import org.apache.commons.configuration.ConfigurationException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+/**
+ * Persistent storage engine that stores Java beans in a
+ * properties file.
+ * The path to the file must be provided via the 'dataworker.storagePath' parameter.
+ */
+@Deprecated
+public class LocalKeyValueStorage extends KeyValueStorage {
+  private final static Logger LOG =
+      LoggerFactory.getLogger(LocalKeyValueStorage.class);
+
+  private PersistentConfiguration config = null;
+
+  /**
+   * Constructor
+   * @param context View Context instance
+   */
+  public LocalKeyValueStorage(ViewContext context) {
+    super(context);
+  }
+
+  /**
+   * Returns config instance
+   * @return config instance
+   */
+  @Override
+  protected synchronized PersistentConfiguration getConfig() {
+    if (config == null) {
+      String fileName = context.getProperties().get("dataworker.storagePath");
+      if (fileName == null) {
+        String msg = "dataworker.storagePath is not configured!";
+        LOG.error(msg);
+        throw new MisconfigurationFormattedException("dataworker.storagePath");
+      }
+      try {
+        config = new PersistentConfiguration(fileName);
+      } catch (ConfigurationException e) {
+        LOG.error("Failed to load persistent configuration from " + fileName, e);
+      }
+    }
+    return config;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/PersistentConfiguration.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/PersistentConfiguration.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/PersistentConfiguration.java
new file mode 100644
index 0000000..c9d7bb7
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/PersistentConfiguration.java
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.persistence;
+
+import org.apache.commons.configuration.ConfigurationException;
+import org.apache.commons.configuration.PropertiesConfiguration;
+import org.apache.commons.configuration.reloading.FileChangedReloadingStrategy;
+
+import java.io.File;
+
+/**
+ * Configuration that enables all necessary options for PropertiesConfiguration:
+ * auto-save, auto-reloading, disabled delimiter parsing, and more.
+ */
+@Deprecated
+public class PersistentConfiguration extends PropertiesConfiguration {
+  /**
+   * Constructor
+   * @param fileName path to data file
+   * @throws ConfigurationException
+   */
+  public PersistentConfiguration(String fileName) throws ConfigurationException {
+    super();
+
+    File config = new File(fileName);
+    setFile(config);
+    this.setAutoSave(true);
+    this.setReloadingStrategy(new FileChangedReloadingStrategy());
+    this.setDelimiterParsingDisabled(true);
+    this.setListDelimiter((char) 0);
+
+    if (config.exists()) {
+      this.load();
+    }
+  }
+}
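
A short sketch of the auto-save/auto-reload behaviour configured above; the file path and the
property key are illustrative only.

  int currentIndex() throws ConfigurationException {
    PersistentConfiguration conf = new PersistentConfiguration("/var/lib/ambari-server/data/hive-view.properties");
    conf.setProperty("org.example.Model:index", 7);     // persisted to the file immediately (auto-save)
    return conf.getInt("org.example.Model:index", 0);   // external edits are picked up via FileChangedReloadingStrategy
  }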


[09/20] ambari git commit: AMBARI-19321 : Hive View 2.0 - Minimal view for Hive which includes new UI changes. Also made changes in poms as required (nitirajrathore)

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/DeleteQueryInput.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/DeleteQueryInput.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/DeleteQueryInput.java
new file mode 100644
index 0000000..e7cbf0a
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/DeleteQueryInput.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads.query;
+
+public class DeleteQueryInput {
+  private String database;
+  private String table;
+
+  public DeleteQueryInput() {
+  }
+
+  public DeleteQueryInput(String database, String table) {
+    this.database = database;
+    this.table = table;
+  }
+
+  public String getDatabase() {
+    return database;
+  }
+
+  public void setDatabase(String database) {
+    this.database = database;
+  }
+
+  public String getTable() {
+    return table;
+  }
+
+  public void setTable(String table) {
+    this.table = table;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/InsertFromQueryInput.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/InsertFromQueryInput.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/InsertFromQueryInput.java
new file mode 100644
index 0000000..027baff
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/InsertFromQueryInput.java
@@ -0,0 +1,92 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads.query;
+
+import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl;
+
+import java.util.List;
+
+public class InsertFromQueryInput {
+  private String fromDatabase;
+  private String fromTable;
+  private String toDatabase;
+  private String toTable;
+  private List<ColumnDescriptionImpl> header;
+  private Boolean unhexInsert = Boolean.FALSE;
+
+  public InsertFromQueryInput() {
+  }
+
+  public InsertFromQueryInput(String fromDatabase, String fromTable, String toDatabase, String toTable, List<ColumnDescriptionImpl> header, Boolean unhexInsert) {
+    this.fromDatabase = fromDatabase;
+    this.fromTable = fromTable;
+    this.toDatabase = toDatabase;
+    this.toTable = toTable;
+    this.header = header;
+    this.unhexInsert = unhexInsert;
+  }
+
+  public List<ColumnDescriptionImpl> getHeader() {
+    return header;
+  }
+
+  public void setHeader(List<ColumnDescriptionImpl> header) {
+    this.header = header;
+  }
+
+  public Boolean getUnhexInsert() {
+    return unhexInsert;
+  }
+
+  public void setUnhexInsert(Boolean unhexInsert) {
+    this.unhexInsert = unhexInsert;
+  }
+
+  public String getFromDatabase() {
+    return fromDatabase;
+  }
+
+  public void setFromDatabase(String fromDatabase) {
+    this.fromDatabase = fromDatabase;
+  }
+
+  public String getFromTable() {
+    return fromTable;
+  }
+
+  public void setFromTable(String fromTable) {
+    this.fromTable = fromTable;
+  }
+
+  public String getToDatabase() {
+    return toDatabase;
+  }
+
+  public void setToDatabase(String toDatabase) {
+    this.toDatabase = toDatabase;
+  }
+
+  public String getToTable() {
+    return toTable;
+  }
+
+  public void setToTable(String toTable) {
+    this.toTable = toTable;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/LoadQueryInput.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/LoadQueryInput.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/LoadQueryInput.java
new file mode 100644
index 0000000..7873144
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/LoadQueryInput.java
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads.query;
+
+/**
+ * Input for QueryGenerator when generating a LOAD query
+ */
+public class LoadQueryInput {
+  private String hdfsFilePath;
+  private String databaseName;
+  private String tableName;
+
+  public LoadQueryInput(String hdfsFilePath, String databaseName, String tableName) {
+    this.hdfsFilePath = hdfsFilePath;
+    this.databaseName = databaseName;
+    this.tableName = tableName;
+  }
+
+  public String getHdfsFilePath() {
+    return hdfsFilePath;
+  }
+
+  public void setHdfsFilePath(String hdfsFilePath) {
+    this.hdfsFilePath = hdfsFilePath;
+  }
+
+  public String getDatabaseName() {
+    return databaseName;
+  }
+
+  public void setDatabaseName(String databaseName) {
+    this.databaseName = databaseName;
+  }
+
+  public String getTableName() {
+    return tableName;
+  }
+
+  public void setTableName(String tableName) {
+    this.tableName = tableName;
+  }
+
+  @Override
+  public String toString() {
+    return "LoadQueryInput{" +
+            "hdfsFilePath='" + hdfsFilePath + '\'' +
+            ", databaseName='" + databaseName + '\'' +
+            ", tableName='" + tableName + '\'' +
+            '}';
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/QueryGenerator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/QueryGenerator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/QueryGenerator.java
new file mode 100644
index 0000000..dda178d
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/QueryGenerator.java
@@ -0,0 +1,143 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads.query;
+
+import org.apache.ambari.view.hive20.resources.uploads.query.RowFormat;
+import org.apache.ambari.view.hive20.client.ColumnDescription;
+import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive20.resources.uploads.HiveFileType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+
+/**
+ * Generates the SQL queries from the given data.
+ */
+public class QueryGenerator {
+  protected final static Logger LOG =
+          LoggerFactory.getLogger(QueryGenerator.class);
+
+  public String generateCreateQuery(TableInfo tableInfo) {
+    String tableName = tableInfo.getTableName();
+    List<ColumnDescriptionImpl> cdList = tableInfo.getHeader();
+
+    StringBuilder query = new StringBuilder();
+    query.append("CREATE TABLE ").append(tableName).append(" (");
+    Collections.sort(cdList, new Comparator<ColumnDescription>() {
+      @Override
+      public int compare(ColumnDescription o1, ColumnDescription o2) {
+        return o1.getPosition() - o2.getPosition();
+      }
+    });
+
+    boolean first = true;
+    for (ColumnDescriptionImpl cd : cdList) {
+      if (first) {
+        first = false;
+      } else {
+        query.append(", ");
+      }
+
+      query.append(cd.getName()).append(" ").append(cd.getType());
+      if (cd.getPrecision() != null) {
+        query.append("(").append(cd.getPrecision());
+        if (cd.getScale() != null) {
+          query.append(",").append(cd.getScale());
+        }
+        query.append(")");
+      }
+
+    }
+
+    query.append(")");
+
+    if(tableInfo.getHiveFileType().equals(HiveFileType.TEXTFILE)) {
+      query.append(getRowFormatQuery(tableInfo.getRowFormat()));
+    }
+    query.append(" STORED AS ").append(tableInfo.getHiveFileType().toString());
+    String queryString = query.append(";").toString();
+    LOG.info("Query : {}", queryString);
+    return queryString;
+  }
+
+  private String getRowFormatQuery(RowFormat rowFormat) {
+    StringBuilder sb = new StringBuilder();
+    if(rowFormat != null) {
+      sb.append(" ROW FORMAT DELIMITED");
+      if(rowFormat.getFieldsTerminatedBy() != null ){
+        sb.append(" FIELDS TERMINATED BY '").append(rowFormat.getFieldsTerminatedBy()).append('\'');
+      }
+      if(rowFormat.getEscapedBy() != null){
+        String escape = String.valueOf(rowFormat.getEscapedBy());
+        if(rowFormat.getEscapedBy() == '\\'){
+          escape = escape + '\\'; // special handling of backslash, since it is the escape character for strings in Hive as well.
+        }
+        sb.append(" ESCAPED BY '").append(escape).append('\'');
+      }
+    }
+
+    return sb.toString();
+  }
+
+  public String generateInsertFromQuery(InsertFromQueryInput ifqi) {
+    StringBuilder insertQuery = new StringBuilder("INSERT INTO TABLE ").append(ifqi.getToDatabase()).append(".")
+                                .append(ifqi.getToTable()).append(" SELECT ");
+
+    boolean first = true;
+    for(ColumnDescriptionImpl column : ifqi.getHeader()){
+      String type = column.getType();
+      boolean unhex = ifqi.getUnhexInsert() && (
+        ColumnDescription.DataTypes.STRING.toString().equals(type)
+          || ColumnDescription.DataTypes.VARCHAR.toString().equals(type)
+          || ColumnDescription.DataTypes.CHAR.toString().equals(type)
+      );
+
+      if(!first){
+        insertQuery.append(", ");
+      }
+
+      if(unhex) {
+        insertQuery.append("UNHEX(");
+      }
+
+      insertQuery.append(column.getName());
+
+      if(unhex) {
+        insertQuery.append(")");
+      }
+
+      first = false;
+    }
+
+    insertQuery.append(" FROM ").append(ifqi.getFromDatabase()).append(".").append(ifqi.getFromTable()).append(";");
+    String query = insertQuery.toString();
+    LOG.info("Insert Query : {}", query);
+    return query;
+  }
+
+  public String generateDropTableQuery(DeleteQueryInput deleteQueryInput) {
+    String dropQuery = new StringBuilder("DROP TABLE ").append(deleteQueryInput.getDatabase())
+                      .append(".").append(deleteQueryInput.getTable()).append(";").toString();
+    LOG.info("Drop Query : {}", dropQuery);
+    return dropQuery;
+  }
+}
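
To illustrate what the generator above produces, a hedged sketch; 'cols' stands for a
List<ColumnDescriptionImpl> describing two columns (say id INT and name STRING), and the
database/table names are made up. The commented output is what the code above should emit,
give or take whitespace.

  RowFormat rowFormat = new RowFormat(',', '\\');
  TableInfo info = new TableInfo("default", "employees", cols, HiveFileType.TEXTFILE, rowFormat);

  QueryGenerator generator = new QueryGenerator();
  String create = generator.generateCreateQuery(info);
  // roughly: CREATE TABLE employees (id INT, name STRING)
  //          ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' ESCAPED BY '\\' STORED AS TEXTFILE;

  String drop = generator.generateDropTableQuery(new DeleteQueryInput("default", "employees"));
  // DROP TABLE default.employees;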

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/RowFormat.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/RowFormat.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/RowFormat.java
new file mode 100644
index 0000000..4c1cb2b
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/RowFormat.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads.query;
+
+public class RowFormat {
+  private Character fieldsTerminatedBy;
+  private Character escapedBy;
+
+  private RowFormat() {
+  }
+
+  public RowFormat(Character fieldsTerminatedBy, Character escapedBy) {
+    this.fieldsTerminatedBy = fieldsTerminatedBy;
+    this.escapedBy = escapedBy;
+  }
+
+  public Character getFieldsTerminatedBy() {
+    return fieldsTerminatedBy;
+  }
+
+  public void setFieldsTerminatedBy(Character fieldsTerminatedBy) {
+    this.fieldsTerminatedBy = fieldsTerminatedBy;
+  }
+
+  public Character getEscapedBy() {
+    return escapedBy;
+  }
+
+  public void setEscapedBy(Character escapedBy) {
+    this.escapedBy = escapedBy;
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("RowFormat{ fieldsTerminatedBy='");
+    sb.append(fieldsTerminatedBy).append( '\'').append(", escapedBy='")
+      .append(escapedBy).append("\'}");
+
+    return sb.toString();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/TableInfo.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/TableInfo.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/TableInfo.java
new file mode 100644
index 0000000..bb39271
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/TableInfo.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.uploads.query;
+
+import org.apache.ambari.view.hive20.resources.uploads.query.RowFormat;
+import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive20.resources.uploads.HiveFileType;
+
+import java.io.Serializable;
+import java.util.List;
+
+/**
+ * Used as input in query generation
+ */
+public class TableInfo implements Serializable {
+  private String tableName;
+  private String databaseName;
+  private List<ColumnDescriptionImpl> header;
+  private HiveFileType hiveFileType;
+
+  private RowFormat rowFormat;
+
+  public String getTableName() {
+    return tableName;
+  }
+
+  public void setTableName(String tableName) {
+    this.tableName = tableName;
+  }
+
+  public String getDatabaseName() {
+    return databaseName;
+  }
+
+  public void setDatabaseName(String databaseName) {
+    this.databaseName = databaseName;
+  }
+
+  public List<ColumnDescriptionImpl> getHeader() {
+    return header;
+  }
+
+  public void setHeader(List<ColumnDescriptionImpl> header) {
+    this.header = header;
+  }
+
+  public HiveFileType getHiveFileType() {
+    return hiveFileType;
+  }
+
+  public void setHiveFileType(HiveFileType hiveFileType) {
+    this.hiveFileType = hiveFileType;
+  }
+
+  public RowFormat getRowFormat() {
+    return rowFormat;
+  }
+
+  public void setRowFormat(RowFormat rowFormat) {
+    this.rowFormat = rowFormat;
+  }
+
+  public TableInfo(String databaseName, String tableName, List<ColumnDescriptionImpl> header, HiveFileType hiveFileType, RowFormat rowFormat) {
+    this.databaseName = databaseName;
+    this.tableName = tableName;
+    this.header = header;
+    this.hiveFileType = hiveFileType;
+    this.rowFormat = rowFormat;
+  }
+
+  public TableInfo(TableInfo tableInfo) {
+    this.tableName = tableInfo.tableName;
+    this.databaseName = tableInfo.databaseName;
+    this.header = tableInfo.header;
+    this.hiveFileType = tableInfo.hiveFileType;
+    this.rowFormat = tableInfo.rowFormat;
+  }
+
+  public TableInfo() {
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/BadRequestFormattedException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/BadRequestFormattedException.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/BadRequestFormattedException.java
new file mode 100644
index 0000000..e152d79
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/BadRequestFormattedException.java
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.utils;
+
+public class BadRequestFormattedException extends ServiceFormattedException {
+  private final static int STATUS = 400;
+
+  public BadRequestFormattedException(String message, Throwable exception) {
+    super(message, exception, STATUS);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/FilePaginator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/FilePaginator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/FilePaginator.java
new file mode 100644
index 0000000..92a0237
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/FilePaginator.java
@@ -0,0 +1,127 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.utils;
+
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.hadoop.fs.FSDataInputStream;
+
+import java.io.IOException;
+import java.nio.charset.Charset;
+import java.util.Arrays;
+
+import static java.lang.Math.ceil;
+
+/**
+ * Pagination implementation for files stored in HDFS
+ */
+public class FilePaginator {
+  public static int MB = 1024*1024;
+  public static int PAGE_SIZE = 1*MB;
+
+  private String filePath;
+  private HdfsApi hdfsApi;
+
+  /**
+   * Constructor
+   * @param filePath Path to file on HDFS
+   * @param hdfsApi hdfs api
+   */
+  public FilePaginator(String filePath, HdfsApi hdfsApi) {
+    this.filePath = filePath;
+    this.hdfsApi = hdfsApi;
+  }
+
+  /**
+   * Set page size
+   * @param PAGE_SIZE size
+   */
+  public static void setPageSize(int PAGE_SIZE) {
+    FilePaginator.PAGE_SIZE = PAGE_SIZE;
+  }
+
+  /**
+   * Get page count
+   * @return page count
+   * @throws java.io.IOException
+   * @throws InterruptedException
+   */
+  public long pageCount() throws IOException, InterruptedException {
+    return (long)
+        ceil( hdfsApi.getFileStatus(filePath).getLen() / ((double)PAGE_SIZE) );
+  }
+
+  /**
+   * Read one page of size PAGE_SIZE
+   * @param page page index
+   * @return data in UTF-8
+   * @throws java.io.IOException
+   * @throws InterruptedException
+   */
+  public String readPage(long page) throws IOException, InterruptedException {
+    FSDataInputStream stream = hdfsApi.open(filePath);
+    try {
+      try {
+        stream.seek(page * PAGE_SIZE);
+      } catch (IOException e) {
+        throw new IllegalArgumentException("Page " + page + " does not exist");
+      }
+
+      byte[] buffer = new byte[PAGE_SIZE];
+      int readCount = 0;
+      int read = 0;
+      while (read < PAGE_SIZE) {
+        readCount = stream.read(buffer, read, PAGE_SIZE - read);
+        if (readCount == -1)
+          break;
+        read += readCount;
+      }
+      if (read != 0) {
+        byte[] readData = Arrays.copyOfRange(buffer, 0, read);
+        return new String(readData, Charset.forName("UTF-8"));
+      } else {
+        if (page == 0) {
+          return "";
+        }
+        throw new IllegalArgumentException("Page " + page + " does not exist");
+      }
+    } finally {
+      // always release the underlying HDFS stream, including on the success path
+      stream.close();
+    }
+  }
+
+  public String readFull(long sizeLimit) throws IOException, InterruptedException {
+    StringBuilder builder = new StringBuilder();
+    int i = 0;
+    while (true) {
+      try {
+        builder.append(readPage(i++));
+      } catch (IllegalArgumentException ex) {
+        break;
+      }
+      if (sizeLimit != -1 && (i+1)*PAGE_SIZE > sizeLimit)
+        break;
+    }
+    return builder.toString();
+  }
+
+  public String readFull() throws IOException, InterruptedException {
+    return readFull(-1);
+  }
+}
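
A small sketch of how the paginator above is typically driven; the HdfsApi instance and the
file path are illustrative.

  String preview(HdfsApi hdfsApi) throws IOException, InterruptedException {
    FilePaginator paginator = new FilePaginator("/user/admin/uploads/data.csv", hdfsApi);
    long pages = paginator.pageCount();        // ceil(file length / PAGE_SIZE)
    String firstPage = paginator.readPage(0);  // first 1 MB of the file, decoded as UTF-8
    return paginator.readFull(10 * FilePaginator.MB);  // stop after roughly 10 MB
  }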

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/HiveActorConfiguration.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/HiveActorConfiguration.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/HiveActorConfiguration.java
new file mode 100644
index 0000000..f759390
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/HiveActorConfiguration.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.utils;
+
+import org.apache.ambari.view.ViewContext;
+
+/**
+ * This fetches the configuration for the actor system from ambari.properties
+ */
+public class HiveActorConfiguration {
+  private static String DEFAULT_CONFIG = "default";
+  private static String CONNECTION_PREFIX = "views.ambari.hive.";
+  private static String CONNECTION_INACTIVITY_TIMEOUT_PATTERN = CONNECTION_PREFIX + "%s.connection.inactivity.timeout";
+  private static String CONNECTION_TERMINATION_TIMEOUT_PATTERN = CONNECTION_PREFIX + "%s.connection.termination.timeout";
+  private static String SYNC_QUERY_TIMEOUT_PATTERN = CONNECTION_PREFIX + "%s.sync.query.timeout";
+  private static String RESULT_FETCH_TIMEOUT_PATTERN = CONNECTION_PREFIX + "%s.result.fetch.timeout";
+
+  private final ViewContext context;
+
+  public HiveActorConfiguration(ViewContext context) {
+    this.context = context;
+  }
+
+  public long getInactivityTimeout(long defaultValue) {
+    return Long.parseLong(getPropertiesFromContext(CONNECTION_INACTIVITY_TIMEOUT_PATTERN, String.valueOf(defaultValue)));
+  }
+
+  public long getTerminationTimeout(long defaultValue) {
+    return Long.parseLong(getPropertiesFromContext(CONNECTION_TERMINATION_TIMEOUT_PATTERN, String.valueOf(defaultValue)));
+  }
+
+  public long getSyncQueryTimeout(long defaultValue) {
+    return Long.parseLong(getPropertiesFromContext(SYNC_QUERY_TIMEOUT_PATTERN, String.valueOf(defaultValue)));
+  }
+
+  public long getResultFetchTimeout(long defaultValue) {
+    return Long.parseLong(getPropertiesFromContext(RESULT_FETCH_TIMEOUT_PATTERN, String.valueOf(defaultValue)));
+  }
+
+  /**
+   * Tries to get the instance-specific configuration value. If it is not found, falls back to the
+   * "default" entry in ambari.properties; if that is also missing, the passed default value is returned.
+   * @param keyPattern Pattern used to generate ambari.properties key
+   * @param defaultValue Returned when the value is not found in ambari.properties
+   * @return value of the property
+   */
+  private String getPropertiesFromContext(String keyPattern, String defaultValue) {
+    String value;
+    value = context.getAmbariProperty(String.format(keyPattern, context.getInstanceName()));
+    if(value == null) {
+      value = context.getAmbariProperty(String.format(keyPattern, DEFAULT_CONFIG));
+    }
+    return value == null ? defaultValue: value;
+  }
+}
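
A sketch of how the lookup above resolves for a view instance named HIVE_1; the property values,
and the assumption that the timeouts are in milliseconds, are illustrative.

  long inactivityTimeout(ViewContext viewContext) {
    // Keys consulted, in order:
    //   views.ambari.hive.HIVE_1.connection.inactivity.timeout=300000
    //   views.ambari.hive.default.connection.inactivity.timeout=600000
    HiveActorConfiguration actorConf = new HiveActorConfiguration(viewContext);
    return actorConf.getInactivityTimeout(30 * 60 * 1000);   // fallback used when neither key is present
  }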

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/HiveClientFormattedException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/HiveClientFormattedException.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/HiveClientFormattedException.java
new file mode 100644
index 0000000..ac63708
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/HiveClientFormattedException.java
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.utils;
+
+public class HiveClientFormattedException extends ServiceFormattedException {
+
+  public HiveClientFormattedException(Throwable exception) {
+    super(exception.getMessage(), exception);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/LoggingOutputStream.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/LoggingOutputStream.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/LoggingOutputStream.java
new file mode 100644
index 0000000..bf13faf
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/LoggingOutputStream.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.utils;
+
+import org.slf4j.Logger;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+
+public class LoggingOutputStream extends OutputStream {
+
+    private final ByteArrayOutputStream baos = new ByteArrayOutputStream(1000);
+    private final Logger logger;
+    private final LogLevel level;
+
+    public enum LogLevel {
+        TRACE, DEBUG, INFO, WARN, ERROR,
+    }
+
+    public LoggingOutputStream(Logger logger, LogLevel level) {
+        this.logger = logger;
+        this.level = level;
+    }
+
+    @Override
+    public void write(int b) {
+        if (b == '\n') {
+            String line = baos.toString();
+            baos.reset();
+
+            switch (level) {
+                case TRACE:
+                    logger.trace(line);
+                    break;
+                case DEBUG:
+                    logger.debug(line);
+                    break;
+                case ERROR:
+                    logger.error(line);
+                    break;
+                case INFO:
+                    logger.info(line);
+                    break;
+                case WARN:
+                    logger.warn(line);
+                    break;
+            }
+        } else {
+            baos.write(b);
+        }
+    }
+
+
+    /**
+     * Closes this output stream and releases any system resources
+     * associated with this stream. A closed stream cannot perform
+     * output operations and cannot be reopened.
+     *
+     * @throws IOException if an I/O error occurs.
+     */
+    @Override
+    public void close() throws IOException {
+        baos.close();
+    }
+}
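
A sketch of the intended use of the stream above: bridging console-style output into slf4j,
one log entry per newline. The logger name and the message are illustrative; PrintStream is java.io.

  Logger log = LoggerFactory.getLogger("hive.console");
  PrintStream ps = new PrintStream(new LoggingOutputStream(log, LoggingOutputStream.LogLevel.INFO), true);
  ps.println("session opened");   // emitted as a single INFO line once '\n' is written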

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/MisconfigurationFormattedException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/MisconfigurationFormattedException.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/MisconfigurationFormattedException.java
new file mode 100644
index 0000000..ddcc342
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/MisconfigurationFormattedException.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.utils;
+
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.util.HashMap;
+
+public class MisconfigurationFormattedException extends WebApplicationException {
+  private final static int STATUS = 500;
+  private final static String message = "Parameter \"%s\" is set to null";
+  private final static Logger LOG =
+      LoggerFactory.getLogger(MisconfigurationFormattedException.class);
+
+  public MisconfigurationFormattedException(String name) {
+    super(errorEntity(name));
+  }
+
+  protected static Response errorEntity(String name) {
+    HashMap<String, Object> response = new HashMap<String, Object>();
+    response.put("message", String.format(message, name));
+    response.put("trace", null);
+    response.put("status", STATUS);
+    return Response.status(STATUS).entity(new JSONObject(response)).type(MediaType.APPLICATION_JSON).build();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/NotFoundFormattedException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/NotFoundFormattedException.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/NotFoundFormattedException.java
new file mode 100644
index 0000000..fd18dfa
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/NotFoundFormattedException.java
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.utils;
+
+public class NotFoundFormattedException extends ServiceFormattedException {
+  private final static int STATUS = 404;
+
+  public NotFoundFormattedException(String message, Throwable exception) {
+    super(message, exception, STATUS);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/ResultFetchFormattedException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/ResultFetchFormattedException.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/ResultFetchFormattedException.java
new file mode 100644
index 0000000..d83d361
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/ResultFetchFormattedException.java
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.utils;
+
+public class ResultFetchFormattedException extends ServiceFormattedException {
+  private final static int STATUS = 500;
+
+  public ResultFetchFormattedException(String message, Throwable exception) {
+    super(message, exception, STATUS);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/ResultNotReadyFormattedException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/ResultNotReadyFormattedException.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/ResultNotReadyFormattedException.java
new file mode 100644
index 0000000..cf8081f
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/ResultNotReadyFormattedException.java
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.utils;
+
+public class ResultNotReadyFormattedException extends ServiceFormattedException {
+  private final static int STATUS = 409;
+
+  public ResultNotReadyFormattedException(String message, Throwable exception) {
+    super(message, exception, STATUS);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/ServiceFormattedException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/ServiceFormattedException.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/ServiceFormattedException.java
new file mode 100644
index 0000000..909a219
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/ServiceFormattedException.java
@@ -0,0 +1,105 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.utils;
+
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.security.AccessControlException;
+import java.util.HashMap;
+
+public class ServiceFormattedException extends WebApplicationException {
+  private final static Logger LOG =
+      LoggerFactory.getLogger(ServiceFormattedException.class);
+
+  public ServiceFormattedException(String message) {
+    super(errorEntity(message, null, suggestStatus(null), null));
+  }
+
+  public ServiceFormattedException(Throwable exception) {
+    super(errorEntity(null, exception, suggestStatus(exception), null));
+  }
+
+  public ServiceFormattedException(String message, Throwable exception) {
+    super(errorEntity(message, exception, suggestStatus(exception), null));
+  }
+
+  public ServiceFormattedException(String message, Throwable exception, int status) {
+    super(errorEntity(message, exception, status, null));
+  }
+
+  public ServiceFormattedException(String message, Exception ex, String curl) {
+    super(errorEntity(message, ex, suggestStatus(ex), curl));
+  }
+
+  private static int suggestStatus(Throwable exception) {
+    int status = 500;
+    if (exception == null) {
+      return status;
+    }
+    if (exception instanceof AccessControlException) {
+      status = 403;
+    }
+    /*if (exception instanceof HiveInvalidQueryException) {
+      status = 400;
+    }*/
+    return status;
+  }
+
+  protected static Response errorEntity(String message, Throwable e, int status, String header) {
+    HashMap<String, Object> response = new HashMap<String, Object>();
+
+    String trace = null;
+
+    response.put("message", message);
+    if (e != null) {
+      trace = e.toString() + "\n\n";
+      StringWriter sw = new StringWriter();
+      e.printStackTrace(new PrintWriter(sw));
+      trace += sw.toString();
+
+      if (message == null) {
+        String innerMessage = e.getMessage();
+        String autoMessage;
+
+        if (innerMessage != null)
+          autoMessage = String.format("E090 %s [%s]", innerMessage, e.getClass().getSimpleName());
+        else
+          autoMessage = "E090 " + e.getClass().getSimpleName();
+        response.put("message", autoMessage);
+      }
+    }
+    response.put("trace", trace);
+    response.put("status", status);
+
+    if(message != null && status != 400) LOG.error(message);
+    if(trace != null && status != 400) LOG.error(trace);
+
+    Response.ResponseBuilder responseBuilder = Response.status(status).entity(new JSONObject(response)).type(MediaType.APPLICATION_JSON);
+    if (header != null)
+      responseBuilder.header("X-INFO", header);
+    return responseBuilder.build();
+  }
+}
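
A sketch of the wrap-and-rethrow pattern this exception supports in the resource classes
(doWork() is hypothetical), plus the rough shape of the JSON entity built by errorEntity above.

  @GET
  public Response getSomething() {
    try {
      return Response.ok(doWork()).build();
    } catch (WebApplicationException ex) {
      throw ex;                                                   // already carries a formatted entity
    } catch (Exception ex) {
      throw new ServiceFormattedException(ex.getMessage(), ex);   // 500, or 403 for AccessControlException
    }
  }
  // client receives roughly: { "message": "...", "trace": "<exception and stack trace>", "status": 500 }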

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/SharedObjectsFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/SharedObjectsFactory.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/SharedObjectsFactory.java
new file mode 100644
index 0000000..5bc6070
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/SharedObjectsFactory.java
@@ -0,0 +1,185 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.utils;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.persistence.IStorageFactory;
+import org.apache.ambari.view.hive20.persistence.Storage;
+import org.apache.ambari.view.hive20.persistence.utils.StorageFactory;
+import org.apache.ambari.view.hive20.resources.jobs.atsJobs.ATSParser;
+import org.apache.ambari.view.hive20.resources.jobs.atsJobs.ATSParserFactory;
+import org.apache.ambari.view.hive20.resources.jobs.rm.RMParser;
+import org.apache.ambari.view.hive20.resources.jobs.rm.RMParserFactory;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.IJobControllerFactory;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobControllerFactory;
+import org.apache.ambari.view.hive20.resources.savedQueries.SavedQueryResourceManager;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.ambari.view.utils.hdfs.HdfsApiException;
+import org.apache.ambari.view.utils.hdfs.HdfsUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Iterator;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+/**
+ * Generates shared connections. Clients with the same tag will get the same connection;
+ * e.g. the user 'admin' using view instance 'HIVE1' will use one connection, while another
+ * user will use a different connection.
+ */
+public class SharedObjectsFactory implements IStorageFactory {
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(SharedObjectsFactory.class);
+
+  private ViewContext context;
+  private final IStorageFactory storageFactory;
+  private final ATSParserFactory atsParserFactory;
+  private final RMParserFactory rmParserFactory;
+
+  private static final Map<Class, Map<String, Object>> localObjects = new ConcurrentHashMap<Class, Map<String, Object>>();
+
+  public SharedObjectsFactory(ViewContext context) {
+    this.context = context;
+    this.storageFactory = new StorageFactory(context);
+    this.atsParserFactory = new ATSParserFactory(context);
+    this.rmParserFactory = new RMParserFactory(context);
+
+    synchronized (localObjects) {
+      if (localObjects.size() == 0) {
+        //localObjects.put(OperationHandleControllerFactory.class, new ConcurrentHashMap<String, Object>());
+        localObjects.put(Storage.class, new ConcurrentHashMap<String, Object>());
+        localObjects.put(IJobControllerFactory.class, new ConcurrentHashMap<String, Object>());
+        localObjects.put(ATSParser.class, new ConcurrentHashMap<String, Object>());
+        localObjects.put(SavedQueryResourceManager.class, new ConcurrentHashMap<String, Object>());
+        localObjects.put(HdfsApi.class, new ConcurrentHashMap<String, Object>());
+        localObjects.put(RMParser.class, new ConcurrentHashMap<String, Object>());
+      }
+    }
+  }
+
+  // =============================
+
+  /*public OperationHandleControllerFactory getOperationHandleControllerFactory() {
+    if (!localObjects.get(OperationHandleControllerFactory.class).containsKey(getTagName()))
+      localObjects.get(OperationHandleControllerFactory.class).put(getTagName(), new OperationHandleControllerFactory(context, this));
+    return (OperationHandleControllerFactory) localObjects.get(OperationHandleControllerFactory.class).get(getTagName());
+  }*/
+
+  // =============================
+  @Override
+  public Storage getStorage() {
+    if (!localObjects.get(Storage.class).containsKey(getTagName()))
+      localObjects.get(Storage.class).put(getTagName(), storageFactory.getStorage());
+    return (Storage) localObjects.get(Storage.class).get(getTagName());
+  }
+
+  // =============================
+  public IJobControllerFactory getJobControllerFactory() {
+    if (!localObjects.get(IJobControllerFactory.class).containsKey(getTagName()))
+      localObjects.get(IJobControllerFactory.class).put(getTagName(), new JobControllerFactory(context, this));
+    return (IJobControllerFactory) localObjects.get(IJobControllerFactory.class).get(getTagName());
+  }
+
+  // =============================
+
+  public SavedQueryResourceManager getSavedQueryResourceManager() {
+    if (!localObjects.get(SavedQueryResourceManager.class).containsKey(getTagName()))
+      localObjects.get(SavedQueryResourceManager.class).put(getTagName(), new SavedQueryResourceManager(context, this));
+    return (SavedQueryResourceManager) localObjects.get(SavedQueryResourceManager.class).get(getTagName());
+  }
+
+  // =============================
+  public ATSParser getATSParser() {
+    if (!localObjects.get(ATSParser.class).containsKey(getTagName()))
+      localObjects.get(ATSParser.class).put(getTagName(), atsParserFactory.getATSParser());
+    return (ATSParser) localObjects.get(ATSParser.class).get(getTagName());
+  }
+
+  // =============================
+  public RMParser getRMParser() {
+    if (!localObjects.get(RMParser.class).containsKey(getTagName()))
+      localObjects.get(RMParser.class).put(getTagName(), rmParserFactory.getRMParser());
+    return (RMParser) localObjects.get(RMParser.class).get(getTagName());
+  }
+
+  // =============================
+  public HdfsApi getHdfsApi() {
+    if (!localObjects.get(HdfsApi.class).containsKey(getTagName())) {
+      try {
+        localObjects.get(HdfsApi.class).put(getTagName(), HdfsUtil.connectToHDFSApi(context));
+      } catch (HdfsApiException e) {
+        String message = "F060 Couldn't open connection to HDFS";
+        LOG.error(message);
+        throw new ServiceFormattedException(message, e);
+      }
+    }
+    return (HdfsApi) localObjects.get(HdfsApi.class).get(getTagName());
+  }
+
+  /**
+   * Generates the tag name. Clients with the same tag will share one connection.
+   * @return tag name
+   */
+  public String getTagName() {
+    if (context == null)
+      return "<null>";
+    return String.format("%s:%s", context.getInstanceName(), context.getUsername());
+  }
+
+  /**
+   * For testing purposes, ability to substitute some local object
+   */
+  public void setInstance(Class clazz, Object object) {
+    localObjects.get(clazz).put(getTagName(), object);
+  }
+
+  /**
+   * For testing purposes, ability to clear all local objects of particular class
+   */
+  public void clear(Class clazz) {
+    localObjects.get(clazz).clear();
+  }
+
+  /**
+   * For testing purposes, ability to clear all connections
+   */
+  public void clear() {
+    for(Map<String, Object> map : localObjects.values()) {
+      map.clear();
+    }
+  }
+
+  /**
+   * Drops all objects for the given instance name.
+   *
+   * @param instanceName the view instance whose cached objects should be dropped
+   */
+  public static void dropInstanceCache(String instanceName){
+    for(Map<String,Object> cache : localObjects.values()){
+      for(Iterator<Map.Entry<String, Object>> it = cache.entrySet().iterator(); it.hasNext();){
+        Map.Entry<String, Object> entry = it.next();
+        if(entry.getKey().startsWith(instanceName+":")){
+          it.remove();
+        }
+      }
+    }
+  }
+}
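
A minimal, self-contained sketch of the tag-based sharing described in the javadoc above
(the class and names below are illustrative, not part of the patch): objects are cached per
"<instanceName>:<username>" tag, so two clients that resolve to the same tag get the same
shared object.

  import java.util.Map;
  import java.util.concurrent.ConcurrentHashMap;

  class TagSharingSketch {
    // One cache entry per "<instanceName>:<username>" tag, mirroring localObjects above.
    private static final Map<String, Object> CACHE = new ConcurrentHashMap<>();

    static Object getShared(String instanceName, String username) {
      String tag = String.format("%s:%s", instanceName, username);
      // computeIfAbsent stands in for the "check, then put" pattern used by the factory methods.
      return CACHE.computeIfAbsent(tag, t -> new Object());
    }

    public static void main(String[] args) {
      Object a = getShared("HIVE1", "admin");
      Object b = getShared("HIVE1", "admin");  // same tag -> same shared object
      Object c = getShared("HIVE1", "maria");  // different user -> different object
      System.out.println((a == b) + " " + (a != c));  // prints "true true"
    }
  }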

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/UniqueConstraintViolationException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/UniqueConstraintViolationException.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/UniqueConstraintViolationException.java
new file mode 100644
index 0000000..8d25f7a
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/UniqueConstraintViolationException.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.utils;
+
+/**
+ * Thrown when a unique constraint is violated. Rendered as an HTTP 409 (Conflict) response.
+ */
+public class UniqueConstraintViolationException extends ServiceFormattedException {
+
+  private final static int STATUS = 409;
+
+  public UniqueConstraintViolationException(String message) {
+    super(message, null, STATUS);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/application.conf
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/application.conf b/contrib/views/hive20/src/main/resources/application.conf
new file mode 100644
index 0000000..8cb45ad
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/application.conf
@@ -0,0 +1,57 @@
+#
+# Copyright 2011 The Apache Software Foundation
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+akka {
+
+  # Log level used by the configured loggers (see "loggers") as soon
+  # as they have been started; before that, see "stdout-loglevel"
+  # Options: OFF, ERROR, WARNING, INFO, DEBUG
+  loglevel = "DEBUG"
+
+  # Log level for the very basic logger activated during ActorSystem startup.
+  # This logger prints the log messages to stdout (System.out).
+  # Options: OFF, ERROR, WARNING, INFO, DEBUG
+  stdout-loglevel = "WARNING"
+
+  actor {
+
+    # Dispatcher to be used for creating JDBC connector actors (sync/async)
+    jdbc-connector-dispatcher {
+      fork-join-executor {
+        parallelism-factor = 5.0
+      }
+    }
+
+    # Dispatcher to be used for creating Resultset related actors
+    result-dispatcher {
+      fork-join-executor {
+        parallelism-factor = 10.0
+      }
+    }
+
+    # Dispatcher to be used for creating miscellaneous actors like the logger, exceptionWriter, etc.
+    misc-dispatcher {
+      fork-join-executor {
+        parallelism-factor = 5.0
+      }
+    }
+  }
+
+}
\ No newline at end of file
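
A minimal sketch of how one of the dispatchers defined above would be picked up when creating
an actor, assuming this file is loaded as the actor system's application.conf (the actor class
and system name are illustrative, not part of the patch):

  import akka.actor.ActorRef;
  import akka.actor.ActorSystem;
  import akka.actor.Props;
  import akka.actor.UntypedActor;

  class DispatcherSketch {
    // Illustrative actor; the real connector actors live in the view's actor package.
    public static class SampleConnector extends UntypedActor {
      @Override
      public void onReceive(Object message) {
        unhandled(message);
      }
    }

    public static void main(String[] args) {
      ActorSystem system = ActorSystem.create("HiveViewSystem");
      // Dispatchers are referenced by their full config path, here nested under "akka.actor".
      ActorRef connector = system.actorOf(
          Props.create(SampleConnector.class)
              .withDispatcher("akka.actor.jdbc-connector-dispatcher"),
          "sample-connector");
    }
  }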

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/.bowerrc
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/.bowerrc b/contrib/views/hive20/src/main/resources/ui/.bowerrc
new file mode 100644
index 0000000..959e169
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/.bowerrc
@@ -0,0 +1,4 @@
+{
+  "directory": "bower_components",
+  "analytics": false
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/.editorconfig
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/.editorconfig b/contrib/views/hive20/src/main/resources/ui/.editorconfig
new file mode 100644
index 0000000..219985c
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/.editorconfig
@@ -0,0 +1,20 @@
+# EditorConfig helps developers define and maintain consistent
+# coding styles between different editors and IDEs
+# editorconfig.org
+
+root = true
+
+
+[*]
+end_of_line = lf
+charset = utf-8
+trim_trailing_whitespace = true
+insert_final_newline = true
+indent_style = space
+indent_size = 2
+
+[*.hbs]
+insert_final_newline = false
+
+[*.{diff,md}]
+trim_trailing_whitespace = false

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/.ember-cli
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/.ember-cli b/contrib/views/hive20/src/main/resources/ui/.ember-cli
new file mode 100644
index 0000000..5a339b9
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/.ember-cli
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+{
+  /**
+    Ember CLI sends analytics information by default. The data is completely
+    anonymous, but there are times when you might want to disable this behavior.
+
+    Setting `disableAnalytics` to true will prevent any data from being sent.
+  */
+  "disableAnalytics": false
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/.gitignore
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/.gitignore b/contrib/views/hive20/src/main/resources/ui/.gitignore
new file mode 100644
index 0000000..8470f82
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/.gitignore
@@ -0,0 +1,18 @@
+# See http://help.github.com/ignore-files/ for more about ignoring files.
+
+# compiled output
+/dist
+/tmp
+
+# dependencies
+/node
+/node_modules
+/bower_components
+
+# misc
+/.sass-cache
+/connect.lock
+/coverage/*
+/libpeerconnection.log
+npm-debug.log
+testem.log

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/.jshintrc
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/.jshintrc b/contrib/views/hive20/src/main/resources/ui/.jshintrc
new file mode 100644
index 0000000..d421faa
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/.jshintrc
@@ -0,0 +1,32 @@
+{
+  "predef": [
+    "document",
+    "window",
+    "-Promise"
+  ],
+  "browser": true,
+  "boss": true,
+  "curly": true,
+  "debug": false,
+  "devel": true,
+  "eqeqeq": true,
+  "evil": true,
+  "forin": false,
+  "immed": false,
+  "laxbreak": false,
+  "newcap": true,
+  "noarg": true,
+  "noempty": false,
+  "nonew": false,
+  "nomen": false,
+  "onevar": false,
+  "plusplus": false,
+  "regexp": false,
+  "undef": true,
+  "sub": true,
+  "strict": false,
+  "white": false,
+  "eqnull": true,
+  "esversion": 6,
+  "unused": true
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/.travis.yml
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/.travis.yml b/contrib/views/hive20/src/main/resources/ui/.travis.yml
new file mode 100644
index 0000000..a4519ba
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/.travis.yml
@@ -0,0 +1,40 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+---
+language: node_js
+node_js:
+  - "4"
+
+sudo: false
+
+cache:
+  directories:
+    - node_modules
+
+before_install:
+  - npm config set spin false
+  - npm install -g bower
+  - bower --version
+  - npm install phantomjs-prebuilt
+  - phantomjs --version
+
+install:
+  - npm install
+  - bower install
+
+script:
+  - npm test

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/.watchmanconfig
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/.watchmanconfig b/contrib/views/hive20/src/main/resources/ui/.watchmanconfig
new file mode 100644
index 0000000..6ec27cc
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/.watchmanconfig
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+{
+  "ignore_dirs": ["tmp", "dist"]
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/README.md
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/README.md b/contrib/views/hive20/src/main/resources/ui/README.md
new file mode 100644
index 0000000..5b6e3e5
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/README.md
@@ -0,0 +1,28 @@
+<!---
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at [http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0)
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+# Ui
+
+To start the UI in development mode, the developer has to proxy the XHR calls
+to the correct Ambari endpoint.
+
+**ember serve --proxy http://c6401.ambari.apache.org:8080/api/v1/views/HIVE/versions/{version}/instances/{instance_name}**
+
+Example:
+```
+$ cd src/main/resources/ui
+$ ember serve --proxy http://c6401.ambari.apache.org:8080/api/v1/views/HIVE/versions/1.5.0/instances/AUTO_HIVE_INSTANCE
+```
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/adapters/application.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/adapters/application.js b/contrib/views/hive20/src/main/resources/ui/app/adapters/application.js
new file mode 100644
index 0000000..82d53e4
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/adapters/application.js
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import DS from 'ember-data';
+import ENV from 'ui/config/environment';
+
+export default DS.RESTAdapter.extend({
+  init: function() {
+    Ember.$.ajaxSetup({
+      cache: false
+    });
+  },
+
+  namespace: Ember.computed(function() {
+    var parts = window.location.pathname.split('/').filter(function(i) {
+      return i !== "";
+    });
+    var view = parts[parts.length - 3];
+    var version = '/versions/' + parts[parts.length - 2];
+    var instance = parts[parts.length - 1];
+
+    if (!/^(\d+\.){2,3}\d+$/.test(parts[parts.length - 2])) { // version is not present
+      instance = parts[parts.length - 2];
+      version = '';
+    }
+    if(ENV.environment === 'development') {
+      return 'resources';
+    }
+    return 'api/v1/views/' + view + version + '/instances/' + instance + '/resources';
+  }),
+
+  headers: Ember.computed(function() {
+    let headers = {
+      'X-Requested-By': 'ambari',
+      'Content-Type': 'application/json'
+    };
+
+    if(ENV.environment === 'development') {
+      // In development mode, when the UI is served via `ember serve`, the XHR requests are proxied to the Ambari
+      // server through the proxy URL passed to `ember serve`. For Ambari to authenticate those requests, this basic
+      // Authorization header is required; it encodes the default admin/admin username/password combination.
+      headers['Authorization'] = 'Basic YWRtaW46YWRtaW4=';
+    }
+     return headers;
+  }),
+});
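
For reference, a tiny sketch (not part of the patch) showing where the hard-coded development-mode
header value used by this adapter comes from: it is simply the Base64 encoding of "admin:admin".

  import java.nio.charset.StandardCharsets;
  import java.util.Base64;

  class BasicAuthSketch {
    public static void main(String[] args) {
      String token = Base64.getEncoder()
          .encodeToString("admin:admin".getBytes(StandardCharsets.UTF_8));
      // Prints "Basic YWRtaW46YWRtaW4=", the value set on the Authorization header above.
      System.out.println("Basic " + token);
    }
  }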

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/adapters/database.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/adapters/database.js b/contrib/views/hive20/src/main/resources/ui/app/adapters/database.js
new file mode 100644
index 0000000..6594309
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/adapters/database.js
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import DDLAdapter from './ddl';
+
+export default DDLAdapter.extend({
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/adapters/ddl.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/adapters/ddl.js b/contrib/views/hive20/src/main/resources/ui/app/adapters/ddl.js
new file mode 100644
index 0000000..003b445
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/adapters/ddl.js
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import ApplicationAdapter from './application';
+
+export default ApplicationAdapter.extend({
+  namespace: Ember.computed(function() {
+    return this._super(...arguments) + '/ddl';
+  })
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/adapters/hdfs-viewer.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/adapters/hdfs-viewer.js b/contrib/views/hive20/src/main/resources/ui/app/adapters/hdfs-viewer.js
new file mode 100644
index 0000000..dcd52b8
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/adapters/hdfs-viewer.js
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import ApplicationAdapter from './application';
+
+export default ApplicationAdapter.extend({
+
+  namespace: Ember.computed(function() {
+    return this._super(...arguments) + '/directories/ops/listdir';
+  })
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/adapters/job.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/adapters/job.js b/contrib/views/hive20/src/main/resources/ui/app/adapters/job.js
new file mode 100644
index 0000000..8ccd7ad
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/adapters/job.js
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import ApplicationAdapter from './application';
+
+export default ApplicationAdapter.extend({
+  fetchResult(jobId) {
+    let resultUrl = this.urlForFindRecord(jobId, 'job') + "/results";
+    return this.ajax(resultUrl, 'GET');
+  },
+
+  getQuery(job) {
+    let queryUrl = this.buildURL() + "/file" + encodeURI(job.get('queryFile'));
+    console.log(queryUrl);
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/adapters/ping.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/adapters/ping.js b/contrib/views/hive20/src/main/resources/ui/app/adapters/ping.js
new file mode 100644
index 0000000..20c6d9c
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/adapters/ping.js
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import ApplicationAdapter from './application';
+
+export default ApplicationAdapter.extend({
+  ping() {
+    const url = this.urlForCreateRecord('ping');
+    return this.ajax(url, 'POST');
+  },
+
+  pathForType() {
+    return "system/ping";
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/adapters/query.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/adapters/query.js b/contrib/views/hive20/src/main/resources/ui/app/adapters/query.js
new file mode 100644
index 0000000..a666b09
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/adapters/query.js
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import ApplicationAdapter from './application';
+
+export default ApplicationAdapter.extend({
+
+  buildURL(){
+    return this._super(...arguments) + '/jobs/';
+  },
+
+  createJob(payload) {
+    let postURL = this.buildURL();
+    return this.ajax(postURL , 'POST', { data: {job: payload} });
+  },
+  getJob(jobId, dateSubmitted, firstCall){
+
+    let url = '';
+    if(firstCall){
+      url = this.buildURL() + jobId + '/results?first=true&_='+ dateSubmitted;
+    }else {
+      url = this.buildURL() + jobId + '/results?_='+ dateSubmitted;
+    }
+
+    return this.ajax(url, 'GET');
+  }
+
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/adapters/table-info.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/adapters/table-info.js b/contrib/views/hive20/src/main/resources/ui/app/adapters/table-info.js
new file mode 100644
index 0000000..46e7591
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/adapters/table-info.js
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import DDLAdapter from './ddl';
+
+export default DDLAdapter.extend({
+
+  buildURL(modelName, id, snapshot, requestType, query) {
+    // Check if the query is to find all tables for a particular database
+    if(Ember.isEmpty(id) && (requestType === 'query' || requestType === 'queryRecord')) {
+      let dbId = query.databaseId;
+      let tableName = query.tableName;
+      let origFindAllUrl = this._super(...arguments);
+      let prefix = origFindAllUrl.substr(0, origFindAllUrl.lastIndexOf("/"));
+      delete query.databaseId;
+      delete query.tableName;
+      return `${prefix}/databases/${dbId}/tables/${tableName}/info`;
+    }
+    return this._super(...arguments);
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/adapters/table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/adapters/table.js b/contrib/views/hive20/src/main/resources/ui/app/adapters/table.js
new file mode 100644
index 0000000..f4d1615
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/adapters/table.js
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import DDLAdapter from './ddl';
+
+export default DDLAdapter.extend({
+  buildURL(modelName, id, snapshot, requestType, query) {
+    // Check if the query is to find all tables for a particular database
+    if(Ember.isEmpty(id) && (requestType === 'query' || requestType === 'queryRecord')) {
+      let dbId = query.databaseId;
+      let tableName = query.tableName;
+      let origFindAllUrl = this._super(...arguments);
+      let prefix = origFindAllUrl.substr(0, origFindAllUrl.lastIndexOf("/"));
+      delete query.databaseId;
+      delete query.tableName;
+      if(Ember.isEmpty(tableName)) {
+        return `${prefix}/databases/${dbId}/tables`;
+      } else {
+        return `${prefix}/databases/${dbId}/tables/${tableName}`;
+      }
+    }
+    return this._super(...arguments);
+  },
+
+
+  createTable(tableMetaInfo) {
+    let postURL = this.buildURL('table', null, null, 'query', {databaseId: tableMetaInfo.database});
+    console.log(postURL);
+    console.log(tableMetaInfo);
+    return this.ajax(postURL, 'POST', { data: {tableInfo: tableMetaInfo} });
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/app.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/app.js b/contrib/views/hive20/src/main/resources/ui/app/app.js
new file mode 100644
index 0000000..af4fdc4
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/app.js
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import Resolver from './resolver';
+import loadInitializers from 'ember-load-initializers';
+import config from './config/environment';
+
+let App;
+
+Ember.MODEL_FACTORY_INJECTIONS = true;
+
+App = Ember.Application.extend({
+  modulePrefix: config.modulePrefix,
+  podModulePrefix: config.podModulePrefix,
+  Resolver
+});
+
+loadInitializers(App, config.modulePrefix);
+
+export default App;

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/breakpoints.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/breakpoints.js b/contrib/views/hive20/src/main/resources/ui/app/breakpoints.js
new file mode 100644
index 0000000..f358473
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/breakpoints.js
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+export default {
+  mobile:  '(max-width: 767px)',
+  tablet:  '(min-width: 768px) and (max-width: 991px)',
+  desktop: '(min-width: 992px) and (max-width: 1200px)'
+};

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/components/.gitkeep
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/.gitkeep b/contrib/views/hive20/src/main/resources/ui/app/components/.gitkeep
new file mode 100644
index 0000000..e69de29


[02/20] ambari git commit: AMBARI-19321 : Hive View 2.0 - Minimal view for Hive which includes new UI changes. Also made changes in poms as required (nitirajrathore)

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/AsyncJobRunnerImplTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/AsyncJobRunnerImplTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/AsyncJobRunnerImplTest.java
new file mode 100644
index 0000000..9aac3c2
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/AsyncJobRunnerImplTest.java
@@ -0,0 +1,138 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.hive20;
+
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.actor.Props;
+import akka.testkit.JavaTestKit;
+import com.google.common.base.Optional;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.actor.HiveActor;
+import org.apache.ambari.view.hive20.actor.ResultSetIterator;
+import org.apache.ambari.view.hive20.actor.message.Connect;
+import org.apache.ambari.view.hive20.actor.message.ExecuteJob;
+import org.apache.ambari.view.hive20.actor.message.FetchError;
+import org.apache.ambari.view.hive20.actor.message.FetchResult;
+import org.apache.ambari.view.hive20.actor.message.HiveMessage;
+import org.apache.ambari.view.hive20.actor.message.SQLStatementJob;
+import org.apache.ambari.view.hive20.actor.message.job.CancelJob;
+import org.apache.ambari.view.hive20.actor.message.job.Failure;
+import org.apache.ambari.view.hive20.client.AsyncJobRunnerImpl;
+import org.apache.ambari.view.hive20.client.ConnectionConfig;
+import org.apache.ambari.view.hive20.client.NonPersistentCursor;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
+import org.apache.hive.jdbc.HiveQueryResultSet;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.sql.ResultSet;
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.*;
+
+public class AsyncJobRunnerImplTest {
+
+  private ActorSystem actorSystem;
+
+  @Before
+  public void setUp() throws Exception {
+    actorSystem = ActorSystem.create("TestingActorSystem");
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    JavaTestKit.shutdownActorSystem(actorSystem);
+  }
+
+
+  @Test
+  public void testSubmitJob() throws Exception {
+    ConnectionConfig connectionConfig = createNiceMock(ConnectionConfig.class);
+    SQLStatementJob sqlStatementJob = createNiceMock(SQLStatementJob.class);
+    Job job = createNiceMock(Job.class);
+    Connect connect = createNiceMock(Connect.class);
+    ViewContext viewContext = createNiceMock(ViewContext.class);
+    ActorRef controller = actorSystem.actorOf(
+            Props.create(TestParent.class));
+    AsyncJobRunnerImpl runner = new AsyncJobRunnerImpl(viewContext, controller, actorSystem);
+    expect(job.getId()).andReturn("1");
+    expect(connect.getJdbcUrl()).andReturn("testjdbc");
+    expect(connectionConfig.createConnectMessage("1")).andReturn(connect);
+    replay(job, connect, connectionConfig);
+    runner.submitJob(connectionConfig, sqlStatementJob, job);
+    verify(job, connectionConfig);
+  }
+
+  @Test
+  public void testCancelJob() throws Exception {
+    ViewContext viewContext = createNiceMock(ViewContext.class);
+    ActorRef controller = actorSystem.actorOf(
+            Props.create(TestParent.class));
+    AsyncJobRunnerImpl runner = new AsyncJobRunnerImpl(viewContext, controller, actorSystem);
+    runner.cancelJob("1", "test");
+  }
+
+  @Test
+  public void testGetCursor() throws Exception {
+    ViewContext viewContext = createNiceMock(ViewContext.class);
+    ActorRef controller = actorSystem.actorOf(
+            Props.create(TestParent.class));
+    AsyncJobRunnerImpl runner = new AsyncJobRunnerImpl(viewContext, controller, actorSystem);
+    Optional<NonPersistentCursor> cursor = runner.getCursor("1", "test");
+    assertTrue(cursor.isPresent());
+  }
+
+
+  @Test
+  public void testGetError() throws Exception {
+    ViewContext viewContext = createNiceMock(ViewContext.class);
+    ActorRef controller = actorSystem.actorOf(
+            Props.create(TestParent.class));
+    AsyncJobRunnerImpl runner = new AsyncJobRunnerImpl(viewContext, controller, actorSystem);
+    Optional<Failure> failure = runner.getError("1", "test");
+    assertTrue(failure.isPresent());
+    assertEquals("failure", failure.get().getMessage());
+  }
+
+  private static class TestParent extends HiveActor {
+
+    @Override
+    public void handleMessage(HiveMessage hiveMessage) {
+      if (hiveMessage.getMessage() instanceof ExecuteJob) {
+        ExecuteJob executeJob = (ExecuteJob) hiveMessage.getMessage();
+        assertEquals(executeJob.getConnect().getJdbcUrl(), "testjdbc");
+      }
+      if (hiveMessage.getMessage() instanceof CancelJob) {
+        CancelJob cancelJob = (CancelJob) hiveMessage.getMessage();
+        assertEquals("1", cancelJob.getJobId());
+        assertEquals("test", cancelJob.getUsername());
+      }
+      if (hiveMessage.getMessage() instanceof FetchError) {
+        sender().tell(Optional.of(new Failure("failure", new NullPointerException())), self());
+      }
+      if (hiveMessage.getMessage() instanceof FetchResult) {
+        ResultSet resultSet = createNiceMock(HiveQueryResultSet.class);
+        ActorRef rsi = context().actorOf(
+                Props.create(ResultSetIterator.class, self(), resultSet));
+        sender().tell(Optional.of(rsi), self());
+      }
+    }
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/ConnectionFailuresTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/ConnectionFailuresTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/ConnectionFailuresTest.java
new file mode 100644
index 0000000..996efd4
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/ConnectionFailuresTest.java
@@ -0,0 +1,157 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20;
+
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.actor.Props;
+import akka.testkit.JavaTestKit;
+import com.google.common.base.Optional;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.actor.DeathWatch;
+import org.apache.ambari.view.hive20.actor.OperationController;
+import org.apache.ambari.view.hive20.actor.message.Connect;
+import org.apache.ambari.view.hive20.actor.message.ExecuteJob;
+import org.apache.ambari.view.hive20.actor.message.HiveJob;
+import org.apache.ambari.view.hive20.actor.message.SQLStatementJob;
+import org.apache.ambari.view.hive20.internal.ConnectionSupplier;
+import org.apache.ambari.view.hive20.internal.DataStorageSupplier;
+import org.apache.ambari.view.hive20.internal.HdfsApiSupplier;
+import org.apache.ambari.view.hive20.internal.HiveConnectionWrapper;
+import org.apache.ambari.view.hive20.persistence.Storage;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobImpl;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.hive.jdbc.HiveConnection;
+import org.apache.hive.jdbc.HiveQueryResultSet;
+import org.apache.hive.jdbc.HiveStatement;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.HashMap;
+
+import static org.easymock.EasyMock.*;
+
+public class ConnectionFailuresTest {
+
+  private ActorSystem actorSystem;
+
+  @Before
+  public void setUp() throws Exception {
+      actorSystem = ActorSystem.create("TestingActorSystem");
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    JavaTestKit.shutdownActorSystem(actorSystem);
+  }
+
+  @Test
+  public void testConnectionFailure() throws Exception {
+    ViewContext viewContext = createNiceMock(ViewContext.class);
+    ConnectionSupplier connectionSupplier = createNiceMock(ConnectionSupplier.class);
+    DataStorageSupplier dataStorageSupplier = createNiceMock(DataStorageSupplier.class);
+    HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
+    HdfsApiSupplier hdfsApiSupplier = createNiceMock(HdfsApiSupplier.class);
+    Connect connect = createNiceMock(Connect.class);
+    Storage storage = createNiceMock(Storage.class);
+    JobImpl jobImpl = createNiceMock(JobImpl.class);
+    ResultSet resultSet = createNiceMock(HiveQueryResultSet.class);
+    HiveStatement statement = createNiceMock(HiveStatement.class);
+    ConnectionDelegate delegate = createNiceMock(ConnectionDelegate.class);
+    HiveConnectionWrapper connectionWrapper = createNiceMock(HiveConnectionWrapper.class);
+    HiveConnection hiveConnection = createNiceMock(HiveConnection.class);
+    HiveJob test = new SQLStatementJob(HiveJob.Type.ASYNC, new String[]{"select * from test"}, "test", "1", "test.log");
+    ExecuteJob executeJob = new ExecuteJob(connect, test);
+    ActorRef deathwatch = actorSystem.actorOf(Props.create(DeathWatch.class));
+    ActorRef operationControl = actorSystem.actorOf(
+            Props.create(OperationController.class, actorSystem, deathwatch, viewContext, connectionSupplier, dataStorageSupplier, hdfsApiSupplier), "operationController-test");
+    expect(hdfsApiSupplier.get(viewContext)).andReturn(Optional.of(hdfsApi));
+    expect(viewContext.getInstanceName()).andReturn("test").anyTimes();
+    expect(viewContext.getProperties()).andReturn(new HashMap<String, String>()).anyTimes();
+    expect(connect.getConnectable(anyObject(AuthParams.class))).andReturn(connectionWrapper);
+    expect(connectionWrapper.isOpen()).andReturn(false).anyTimes();
+    expect(connectionWrapper.getConnection()).andReturn(Optional.<HiveConnection>absent()).anyTimes();
+    expect(dataStorageSupplier.get(viewContext)).andReturn(storage);
+    expect(connectionSupplier.get(viewContext)).andReturn(delegate);
+    expect(storage.load(JobImpl.class, "1")).andReturn(jobImpl).anyTimes();
+    expect(jobImpl.getDateSubmitted()).andReturn(0L).times(1);
+    connectionWrapper.connect();
+    jobImpl.setStatus(Job.JOB_STATE_ERROR);
+    storage.store(JobImpl.class, jobImpl);
+    replay(viewContext, connect, hdfsApiSupplier, dataStorageSupplier, connectionWrapper,
+            storage, jobImpl, connectionSupplier, delegate, statement, resultSet);
+
+    operationControl.tell(executeJob, ActorRef.noSender());
+    Thread.sleep(5000);
+    verify(connect, hdfsApiSupplier, dataStorageSupplier, connectionWrapper,
+            storage, jobImpl, connectionSupplier, delegate, statement, resultSet);
+
+  }
+
+  @Test
+  public void testExecutionFailure() throws Exception {
+    ViewContext viewContext = createNiceMock(ViewContext.class);
+    ConnectionSupplier connectionSupplier = createNiceMock(ConnectionSupplier.class);
+    DataStorageSupplier dataStorageSupplier = createNiceMock(DataStorageSupplier.class);
+    HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
+    HdfsApiSupplier hdfsApiSupplier = createNiceMock(HdfsApiSupplier.class);
+    Connect connect = createNiceMock(Connect.class);
+    Storage storage = createNiceMock(Storage.class);
+    JobImpl jobImpl = createNiceMock(JobImpl.class);
+    ResultSet resultSet = createNiceMock(HiveQueryResultSet.class);
+    HiveStatement statement = createNiceMock(HiveStatement.class);
+    ConnectionDelegate delegate = createNiceMock(ConnectionDelegate.class);
+    HiveConnectionWrapper connectionWrapper = createNiceMock(HiveConnectionWrapper.class);
+    HiveConnection hiveConnection = createNiceMock(HiveConnection.class);
+    HiveJob test = new SQLStatementJob(HiveJob.Type.ASYNC, new String[]{"select * from test"}, "test", "1", "test.log");
+    ExecuteJob executeJob = new ExecuteJob(connect, test);
+    ActorRef deathwatch = actorSystem.actorOf(Props.create(DeathWatch.class));
+    ActorRef operationControl = actorSystem.actorOf(
+            Props.create(OperationController.class, actorSystem, deathwatch, viewContext, connectionSupplier, dataStorageSupplier, hdfsApiSupplier), "operationController-test");
+    expect(hdfsApiSupplier.get(viewContext)).andReturn(Optional.of(hdfsApi));
+    expect(viewContext.getProperties()).andReturn(new HashMap<String, String>()).anyTimes();
+    expect(connect.getConnectable(anyObject(AuthParams.class))).andReturn(connectionWrapper);
+    expect(connectionWrapper.isOpen()).andReturn(false);
+    expect(connectionWrapper.getConnection()).andReturn(Optional.of(hiveConnection)).anyTimes();
+    expect(dataStorageSupplier.get(viewContext)).andReturn(storage);
+    expect(connectionSupplier.get(viewContext)).andReturn(delegate);
+    expect(storage.load(JobImpl.class, "1")).andReturn(jobImpl).anyTimes();
+    expect(delegate.createStatement(hiveConnection)).andReturn(statement);
+    expect(delegate.execute("select * from test")).andThrow(new SQLException("Syntax error"));
+    expect(jobImpl.getDateSubmitted()).andReturn(0L).times(2);
+    jobImpl.setStatus(Job.JOB_STATE_RUNNING);
+    storage.store(JobImpl.class, jobImpl);
+    connectionWrapper.connect();
+    jobImpl.setStatus(Job.JOB_STATE_ERROR);
+    storage.store(JobImpl.class, jobImpl);
+    replay(viewContext, connect, hdfsApiSupplier, dataStorageSupplier, connectionWrapper,
+            storage, jobImpl, connectionSupplier, delegate, statement, resultSet);
+
+    operationControl.tell(executeJob, ActorRef.noSender());
+    Thread.sleep(5000);
+    verify(connect, hdfsApiSupplier, dataStorageSupplier, connectionWrapper,
+            storage, jobImpl, connectionSupplier, delegate, statement, resultSet);
+  }
+
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/HiveJdbcConnectionDelegateTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/HiveJdbcConnectionDelegateTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/HiveJdbcConnectionDelegateTest.java
new file mode 100644
index 0000000..1d43fdc
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/HiveJdbcConnectionDelegateTest.java
@@ -0,0 +1,105 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20;
+
+import com.google.common.base.Optional;
+import org.apache.ambari.view.hive20.actor.message.GetColumnMetadataJob;
+import org.apache.hive.jdbc.HiveConnection;
+import org.apache.hive.jdbc.HiveQueryResultSet;
+import org.apache.hive.jdbc.HiveStatement;
+import org.junit.Test;
+
+import java.sql.DatabaseMetaData;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.assertEquals;
+
+public class HiveJdbcConnectionDelegateTest {
+
+
+  @Test
+  public void testCreateStatement() throws SQLException {
+    HiveConnection hiveConnection = createNiceMock(HiveConnection.class);
+    HiveStatement hiveStatement = createNiceMock(HiveStatement.class);
+    expect(hiveConnection.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)).andReturn(hiveStatement);
+    replay(hiveConnection);
+    HiveStatement statement = new HiveJdbcConnectionDelegate().createStatement(hiveConnection);
+    assertEquals(hiveStatement, statement);
+
+  }
+
+
+  @Test
+  public void testExecute() throws SQLException {
+    HiveConnection hiveConnection = createNiceMock(HiveConnection.class);
+    HiveStatement hiveStatement = createNiceMock(HiveStatement.class);
+    HiveQueryResultSet resultSet = createNiceMock(HiveQueryResultSet.class);
+    expect(hiveConnection.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)).andReturn(hiveStatement);
+    String query = "select * from test";
+    expect(hiveStatement.execute(query)).andReturn(true);
+    expect(hiveStatement.getResultSet()).andReturn(resultSet);
+    replay(hiveConnection, hiveStatement, resultSet);
+    HiveJdbcConnectionDelegate hiveJdbcConnectionDelegate = new HiveJdbcConnectionDelegate();
+    Optional<ResultSet> execute = hiveJdbcConnectionDelegate.execute(hiveConnection, query);
+    assertEquals(execute.get(), resultSet);
+    verify(hiveConnection, hiveStatement, resultSet);
+
+  }
+
+
+  @Test
+  public void testGetColumnMetaData() throws SQLException {
+
+    HiveConnection hiveConnection = createNiceMock(HiveConnection.class);
+    DatabaseMetaData metadata = createNiceMock(DatabaseMetaData.class);
+    expect(hiveConnection.getMetaData()).andReturn(metadata);
+    ResultSet resultSet = createNiceMock(ResultSet.class);
+    expect(metadata.getColumns(anyString(), anyString(), anyString(), anyString())).andReturn(resultSet);
+    replay(hiveConnection, metadata, resultSet);
+    HiveJdbcConnectionDelegate hiveJdbcConnectionDelegate = new HiveJdbcConnectionDelegate();
+    ResultSet columnMetadata = hiveJdbcConnectionDelegate.getColumnMetadata(hiveConnection, new GetColumnMetadataJob("", "", "", ""));
+    assertEquals(resultSet, columnMetadata);
+    verify(hiveConnection, metadata, resultSet);
+  }
+
+
+  @Test
+  public void testCancel() throws SQLException {
+    HiveConnection hiveConnection = createNiceMock(HiveConnection.class);
+    HiveStatement hiveStatement = createNiceMock(HiveStatement.class);
+    HiveQueryResultSet resultSet = createNiceMock(HiveQueryResultSet.class);
+    expect(hiveConnection.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)).andReturn(hiveStatement);
+    String query = "select * from test";
+    expect(hiveStatement.execute(query)).andReturn(true);
+    expect(hiveStatement.getResultSet()).andReturn(resultSet);
+    hiveStatement.cancel();
+    resultSet.close();
+    replay(hiveConnection, hiveStatement, resultSet);
+    HiveJdbcConnectionDelegate hiveJdbcConnectionDelegate = new HiveJdbcConnectionDelegate();
+    hiveJdbcConnectionDelegate.execute(hiveConnection, query);
+    hiveJdbcConnectionDelegate.cancel();
+    hiveJdbcConnectionDelegate.closeResultSet();
+    hiveJdbcConnectionDelegate.closeStatement();
+    verify(hiveConnection, hiveStatement, resultSet);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/JobExecutionTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/JobExecutionTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/JobExecutionTest.java
new file mode 100644
index 0000000..94ebb27
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/JobExecutionTest.java
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20;
+
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.actor.Props;
+import akka.testkit.JavaTestKit;
+import com.beust.jcommander.internal.Lists;
+import com.google.common.base.Optional;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.actor.DeathWatch;
+import org.apache.ambari.view.hive20.actor.OperationController;
+import org.apache.ambari.view.hive20.actor.message.Connect;
+import org.apache.ambari.view.hive20.actor.message.ExecuteJob;
+import org.apache.ambari.view.hive20.actor.message.HiveJob;
+import org.apache.ambari.view.hive20.actor.message.SQLStatementJob;
+import org.apache.ambari.view.hive20.internal.ConnectionSupplier;
+import org.apache.ambari.view.hive20.internal.DataStorageSupplier;
+import org.apache.ambari.view.hive20.internal.HdfsApiSupplier;
+import org.apache.ambari.view.hive20.internal.HiveConnectionWrapper;
+import org.apache.ambari.view.hive20.persistence.Storage;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobImpl;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.hive.jdbc.HiveConnection;
+import org.apache.hive.jdbc.HiveQueryResultSet;
+import org.apache.hive.jdbc.HiveStatement;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.sql.ResultSet;
+import java.util.HashMap;
+
+import static org.easymock.EasyMock.*;
+
+public class JobExecutionTest {
+
+  private ActorSystem actorSystem;
+
+  @Before
+  public void setUp() throws Exception {
+    actorSystem = ActorSystem.create("TestingActorSystem");
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    JavaTestKit.shutdownActorSystem(actorSystem);
+  }
+
+  @Test
+  public void testExecuteJob() throws Exception {
+    ViewContext viewContext = createNiceMock(ViewContext.class);
+    ConnectionSupplier connectionSupplier = createNiceMock(ConnectionSupplier.class);
+    DataStorageSupplier dataStorageSupplier = createNiceMock(DataStorageSupplier.class);
+    HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
+    HdfsApiSupplier hdfsApiSupplier = createNiceMock(HdfsApiSupplier.class);
+    Connect connect = createNiceMock(Connect.class);
+    Storage storage = createNiceMock(Storage.class);
+    JobImpl jobImpl = createNiceMock(JobImpl.class);
+    ResultSet resultSet = createNiceMock(HiveQueryResultSet.class);
+    HiveStatement statement = createNiceMock(HiveStatement.class);
+    ConnectionDelegate delegate = createNiceMock(ConnectionDelegate.class);
+    HiveConnectionWrapper connectionWrapper = createNiceMock(HiveConnectionWrapper.class);
+    HiveConnection hiveConnection = createNiceMock(HiveConnection.class);
+    HiveJob test = new SQLStatementJob(HiveJob.Type.ASYNC, new String[]{"select * from test"}, "test", "1", "test.log");
+    ExecuteJob executeJob = new ExecuteJob(connect, test);
+    ActorRef deathwatch = actorSystem.actorOf(Props.create(DeathWatch.class));
+    ActorRef operationControl = actorSystem.actorOf(
+            Props.create(OperationController.class, actorSystem, deathwatch, viewContext, connectionSupplier, dataStorageSupplier, hdfsApiSupplier), "operationController-test");
+    expect(hdfsApiSupplier.get(viewContext)).andReturn(Optional.of(hdfsApi));
+    expect(viewContext.getProperties()).andReturn(new HashMap<String, String>()).anyTimes();
+    expect(connect.getConnectable(anyObject(AuthParams.class))).andReturn(connectionWrapper);
+    expect(connectionWrapper.isOpen()).andReturn(false);
+    expect(connectionWrapper.getConnection()).andReturn(Optional.of(hiveConnection)).anyTimes();
+    expect(dataStorageSupplier.get(viewContext)).andReturn(storage);
+    expect(connectionSupplier.get(viewContext)).andReturn(delegate);
+    expect(storage.load(JobImpl.class, "1")).andReturn(jobImpl).anyTimes();
+    expect(delegate.createStatement(hiveConnection)).andReturn(statement);
+    expect(delegate.execute("select * from test")).andReturn(Optional.of(resultSet));
+    expect(statement.getQueryLog()).andReturn(Lists.<String>newArrayList());
+    expect(jobImpl.getDateSubmitted()).andReturn(0L).times(2);
+    jobImpl.setStatus(Job.JOB_STATE_RUNNING);
+    storage.store(JobImpl.class, jobImpl);
+    connectionWrapper.connect();
+    jobImpl.setStatus(Job.JOB_STATE_FINISHED);
+    storage.store(JobImpl.class, jobImpl);
+    replay(viewContext, connect, hdfsApiSupplier, dataStorageSupplier, connectionWrapper,
+            storage, jobImpl, connectionSupplier, delegate, statement, resultSet);
+
+    operationControl.tell(executeJob, ActorRef.noSender());
+    Thread.sleep(5000);
+    verify(connect, hdfsApiSupplier, dataStorageSupplier, connectionWrapper,
+            storage, jobImpl, connectionSupplier, delegate, statement, resultSet);
+
+  }
+
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/PropertyValidatorTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/PropertyValidatorTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/PropertyValidatorTest.java
new file mode 100644
index 0000000..d9c4b2a
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/PropertyValidatorTest.java
@@ -0,0 +1,113 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20;
+
+import org.apache.ambari.view.ViewInstanceDefinition;
+import org.apache.ambari.view.validation.Validator;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+public class PropertyValidatorTest {
+
+  @Test
+  public void testValidatePropertyWebHDFSCom() throws Exception {
+    PropertyValidator validator = new PropertyValidator();
+    ViewInstanceDefinition definition = getViewInstanceDefinition();
+
+    definition.getPropertyMap().put(PropertyValidator.WEBHDFS_URL, "hdfs://hostname.com:8020");
+
+    assertTrue(validator.validateProperty(PropertyValidator.WEBHDFS_URL,
+            definition, Validator.ValidationContext.PRE_CREATE).isValid());
+
+    definition.getPropertyMap().put(PropertyValidator.WEBHDFS_URL, "webhdfs://hostname.com:50070");
+
+    assertTrue(validator.validateProperty(PropertyValidator.WEBHDFS_URL,
+            definition, Validator.ValidationContext.PRE_CREATE).isValid());
+
+    definition.getPropertyMap().put(PropertyValidator.WEBHDFS_URL, "http://hostname.com:50070");
+
+    assertFalse(validator.validateProperty(PropertyValidator.WEBHDFS_URL,
+            definition, Validator.ValidationContext.PRE_CREATE).isValid());
+  }
+
+  @Test
+  public void testValidatePropertyWebHDFSInternal() throws Exception {
+    PropertyValidator validator = new PropertyValidator();
+    ViewInstanceDefinition definition = getViewInstanceDefinition();
+
+    definition.getPropertyMap().put(PropertyValidator.WEBHDFS_URL, "hdfs://hostname.internal:8020");
+
+    assertTrue(validator.validateProperty(PropertyValidator.WEBHDFS_URL,
+            definition, Validator.ValidationContext.PRE_CREATE).isValid());
+
+    definition.getPropertyMap().put(PropertyValidator.WEBHDFS_URL, "webhdfs://hostname.internal:50070");
+
+    assertTrue(validator.validateProperty(PropertyValidator.WEBHDFS_URL,
+            definition, Validator.ValidationContext.PRE_CREATE).isValid());
+
+    definition.getPropertyMap().put(PropertyValidator.WEBHDFS_URL, "swebhdfs://hostname.internal:50070");
+
+    assertTrue(validator.validateProperty(PropertyValidator.WEBHDFS_URL,
+            definition, Validator.ValidationContext.PRE_CREATE).isValid());
+
+    definition.getPropertyMap().put(PropertyValidator.WEBHDFS_URL, "http://hostname.internal:50070");
+
+    assertFalse(validator.validateProperty(PropertyValidator.WEBHDFS_URL,
+            definition, Validator.ValidationContext.PRE_CREATE).isValid());
+  }
+
+  @Test
+  public void testValidatePropertyATSCom() throws Exception {
+    PropertyValidator validator = new PropertyValidator();
+    ViewInstanceDefinition definition = getViewInstanceDefinition();
+
+    definition.getPropertyMap().put(PropertyValidator.YARN_ATS_URL, "http://hostname.com:8088");
+
+    assertTrue(validator.validateProperty(PropertyValidator.YARN_ATS_URL,
+            definition, Validator.ValidationContext.PRE_CREATE).isValid());
+  }
+
+  @Test
+  public void testValidatePropertyATSInternal() throws Exception {
+    PropertyValidator validator = new PropertyValidator();
+    ViewInstanceDefinition definition = getViewInstanceDefinition();
+
+    definition.getPropertyMap().put(PropertyValidator.YARN_ATS_URL, "http://hostname.internal:8088");
+
+    assertTrue(validator.validateProperty(PropertyValidator.YARN_ATS_URL,
+            definition, Validator.ValidationContext.PRE_CREATE).isValid());
+  }
+
+  private ViewInstanceDefinition getViewInstanceDefinition() {
+    ViewInstanceDefinition definition = EasyMock.createNiceMock(ViewInstanceDefinition.class);
+    expect(definition.getClusterHandle()).andReturn(null).anyTimes();
+    Map<String, String> properties = new HashMap<String, String>();
+    expect(definition.getPropertyMap()).andReturn(properties).anyTimes();
+    replay(definition);
+    return definition;
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/ResultSetIteratorTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/ResultSetIteratorTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/ResultSetIteratorTest.java
new file mode 100644
index 0000000..f7db199
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/ResultSetIteratorTest.java
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.hive20;
+
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.actor.Props;
+import akka.testkit.JavaTestKit;
+import org.apache.ambari.view.hive20.actor.HiveActor;
+import org.apache.ambari.view.hive20.actor.ResultSetIterator;
+import org.apache.ambari.view.hive20.actor.message.HiveMessage;
+import org.apache.ambari.view.hive20.actor.message.ResetCursor;
+import org.apache.ambari.view.hive20.actor.message.job.Next;
+import org.apache.hive.jdbc.HiveQueryResultSet;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+
+import static org.easymock.EasyMock.*;
+
+
+public class ResultSetIteratorTest {
+
+  private ActorSystem actorSystem;
+
+  @Before
+  public void setUp() throws Exception {
+    actorSystem = ActorSystem.create("TestingActorSystem");
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    JavaTestKit.shutdownActorSystem(actorSystem);
+  }
+
+  @Test
+  public void testGetNext() throws Exception {
+    ResultSet resultSet = createNiceMock(HiveQueryResultSet.class);
+    ResultSetMetaData resultSetMetaData = createNiceMock(ResultSetMetaData.class);
+
+    ActorRef parent = actorSystem.actorOf(
+            Props.create(TestParent.class));
+    ActorRef rsi = actorSystem.actorOf(
+            Props.create(ResultSetIterator.class, parent, resultSet));
+    expect(resultSet.getMetaData()).andReturn(resultSetMetaData);
+    expect(resultSetMetaData.getColumnCount()).andReturn(1);
+    expect(resultSetMetaData.getColumnName(1)).andReturn("test");
+    expect(resultSetMetaData.getColumnTypeName(1)).andReturn("string");
+    replay(resultSet, resultSetMetaData);
+    rsi.tell(new Next(), parent);
+    Thread.sleep(2000);
+    verify(resultSet, resultSetMetaData);
+
+  }
+
+  @Test
+  public void testResetCursor() throws Exception {
+    ResultSet resultSet = createNiceMock(HiveQueryResultSet.class);
+
+    ActorRef parent = actorSystem.actorOf(
+            Props.create(TestParent.class));
+    ActorRef rsi = actorSystem.actorOf(
+            Props.create(ResultSetIterator.class, parent, resultSet));
+    resultSet.beforeFirst();
+    replay(resultSet);
+    rsi.tell(new ResetCursor(), parent);
+    Thread.sleep(2000);
+    verify(resultSet);
+
+  }
+
+
+  private static class TestParent extends HiveActor {
+
+    @Override
+    public void handleMessage(HiveMessage hiveMessage) {
+
+    }
+  }
+
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/parsers/ParserUtilsTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/parsers/ParserUtilsTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/parsers/ParserUtilsTest.java
new file mode 100644
index 0000000..86170e7
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/parsers/ParserUtilsTest.java
@@ -0,0 +1,66 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package org.apache.ambari.view.hive20.internal.parsers;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.List;
+
+public class ParserUtilsTest {
+
+  @Test
+  public void parseColumnDataTypeDecimalTest(){
+    String columnDataTypeString = " decimal(10,2) ";
+    List<String> list = ParserUtils.parseColumnDataType(columnDataTypeString);
+    Assert.assertEquals("Must contain 3 elements : ", 3, list.size());
+    Assert.assertEquals("Failed to find datatype. ", "decimal", list.get(0));
+    Assert.assertEquals("Failed to find precision. ", "10", list.get(1));
+    Assert.assertEquals("Failed to find scale. ", "2", list.get(2));
+  }
+
+  @Test
+  public void parseColumnDataTypeDecimalWithSpaceTest(){
+    String columnDataTypeString = " decimal ( 10 ,   2 ) ";
+    List<String> list = ParserUtils.parseColumnDataType(columnDataTypeString);
+    Assert.assertEquals("Must contain 3 elements : ", 3, list.size());
+    Assert.assertEquals("Failed to find datatype. ", "decimal", list.get(0));
+    Assert.assertEquals("Failed to find precision. ", "10", list.get(1));
+    Assert.assertEquals("Failed to find scale. ", "2", list.get(2));
+  }
+
+  @Test
+  public void parseColumnDataTypeVarcharTest(){
+    String columnDataTypeString = " VARCHAR( 10)  ";
+    List<String> list = ParserUtils.parseColumnDataType(columnDataTypeString);
+    Assert.assertEquals("Must contain 2 elements : ", 3, list.size());
+    Assert.assertEquals("Failed to find datatype. ", "VARCHAR", list.get(0));
+    Assert.assertEquals("Failed to find precision. ", "10", list.get(1));
+    Assert.assertNull("Scale should be null. ", list.get(2));
+  }
+
+  @Test
+  public void parseColumnDataTypeBooleanTest(){
+    String columnDataTypeString = " BOOLEAN  ";
+    List<String> list = ParserUtils.parseColumnDataType(columnDataTypeString);
+    Assert.assertEquals("Must contain 1 elements : ", 3, list.size());
+    Assert.assertEquals("Failed to find datatype. ", "BOOLEAN", list.get(0));
+    Assert.assertNull("Precision should be null. ", list.get(1));
+    Assert.assertNull("Scale should be null. ", list.get(2));
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/AlterTableQueryGenerationSpecTest.groovy
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/AlterTableQueryGenerationSpecTest.groovy b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/AlterTableQueryGenerationSpecTest.groovy
new file mode 100644
index 0000000..874e268
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/AlterTableQueryGenerationSpecTest.groovy
@@ -0,0 +1,59 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+
+package org.apache.ambari.view.hive20.internal.query.generators
+
+import spock.lang.Specification
+
+class AlterTableQueryGenerationSpecTest extends Specification {
+//  def "alter simple table"() {
+//    // blocks go here
+//    setup:
+//    def oldTableMeta = new TableMeta()
+//    def newTableMeta = new TableMeta()
+//    def oldCols = new ArrayList<>();
+//    oldCols.add(new ColumnInfo())
+//    oldTableMeta.setColumns()
+//
+//    when:
+//    stack.push(elem)
+//
+//    then:
+//    println "inside AlterTableQueryGenerationSpecTest"
+//    !stack.empty
+//    stack.size() == 1
+//    stack.peek() == elem
+//  }
+//
+//  def "pushing again an element on the stack"() {
+//    // blocks go here
+//    setup:
+//    def stack = new Stack()
+//    def elem = "push me"
+//
+//    when:
+//    stack.push(elem)
+//
+//    then:
+//    println "inside AlterTableQueryGenerationSpecTest"
+//    !stack.empty
+//    stack.size() == 1
+//    stack.peek() == elem
+//  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/AlterTableQueryGeneratorTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/AlterTableQueryGeneratorTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/AlterTableQueryGeneratorTest.java
new file mode 100644
index 0000000..45f29da
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/AlterTableQueryGeneratorTest.java
@@ -0,0 +1,94 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.ambari.view.hive20.internal.query.generators;
+
+import com.google.common.base.Optional;
+import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;
+import org.apache.ambari.view.hive20.internal.dto.TableMeta;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.Arrays;
+import java.util.List;
+
+public class AlterTableQueryGeneratorTest {
+  @Test
+  public void getQuery() throws Exception {
+
+  }
+
+  @Test
+  public void generateColumnQuery() throws Exception {
+
+  }
+
+  @Test
+  public void createColumnQueriesForSuccessfulChangeColumn() throws Exception {
+    ColumnInfo colInfo1 = new ColumnInfo("col1", "CHAR(1)", "COMMENT 1"); // with comment
+    ColumnInfo colInfo2 = new ColumnInfo("col2", "DECIMAL(10,5)"); // no comment
+    ColumnInfo colInfo3 = new ColumnInfo("col3", "STRING", "COMMENT-3");
+    ColumnInfo colInfo4 = new ColumnInfo("col4", "VARCHAR(10)", "COMMENT 4");
+    ColumnInfo colInfo5 = new ColumnInfo("col5", "STRING", "COMMENT 5");
+    ColumnInfo colInfo6 = new ColumnInfo("col6", "INT");
+    List<ColumnInfo> oldColumns = Arrays.asList(colInfo1, colInfo2, colInfo3);
+    List<ColumnInfo> newColumns = Arrays.asList(colInfo4, colInfo5, colInfo6); // all changed
+    Optional<List<String>> query = AlterTableQueryGenerator.createColumnQueries(oldColumns, newColumns, false);
+
+    Assert.assertTrue(query.isPresent());
+    List<String> queries = query.get();
+
+    Assert.assertEquals("Expected number of column update queries were different.", 3, queries.size());
+    String[] expectedQueries = new String[]{" CHANGE COLUMN `col1` `col4` VARCHAR(10) COMMENT \'COMMENT 4\'", " CHANGE COLUMN `col2` `col5` STRING COMMENT \'COMMENT 5\'", " CHANGE COLUMN `col3` `col6` INT"};
+
+    Assert.assertArrayEquals("Column change queries were not equal ", expectedQueries, queries.toArray());
+  }
+
+  @Test
+  public void createColumnQueriesForSuccessfulChangeAndAddColumn() throws Exception {
+
+    TableMeta oldMeta = new TableMeta();
+    TableMeta newMeta = new TableMeta();
+
+    ColumnInfo colInfo1 = new ColumnInfo("col1", "CHAR(1)", "COMMENT 1"); // with comment
+    ColumnInfo colInfo2 = new ColumnInfo("col2", "DECIMAL(10,5)"); // no comment
+    ColumnInfo colInfo3 = new ColumnInfo("col3", "STRING", "COMMENT-3");
+    ColumnInfo colInfo4 = new ColumnInfo("col4", "VARCHAR(10)", "COMMENT 4");
+    ColumnInfo colInfo5 = new ColumnInfo("col5", "STRING", "COMMENT 5");
+    ColumnInfo colInfo6 = new ColumnInfo("col6", "INT");
+    ColumnInfo colInfo7 = new ColumnInfo("col7", "DATE");
+    ColumnInfo colInfo8 = new ColumnInfo("col8", "BOOLEAN", "COMMENT 8");
+
+    List<ColumnInfo> oldColumns = Arrays.asList(colInfo1, colInfo2, colInfo3);
+    oldMeta.setColumns(oldColumns);
+
+    List<ColumnInfo> newColumns = Arrays.asList(colInfo4, colInfo5, colInfo6, colInfo7, colInfo8); // all changed
+    newMeta.setColumns(newColumns);
+
+    Optional<List<String>> query = AlterTableQueryGenerator.createColumnQueries(oldColumns, newColumns, false);
+
+    Assert.assertTrue(query.isPresent());
+    List<String> queries = query.get();
+
+    Assert.assertEquals("Expected number of column update queries were different.", 4, queries.size());
+    System.out.println(queries);
+    String[] expectedQueries = new String[]{" CHANGE COLUMN `col1` `col4` VARCHAR(10) COMMENT \'COMMENT 4\'", " CHANGE COLUMN `col2` `col5` STRING COMMENT \'COMMENT 5\'", " CHANGE COLUMN `col3` `col6` INT"," ADD COLUMNS ( `col7` DATE, `col8` BOOLEAN COMMENT \'COMMENT 8\' )" };
+
+    Assert.assertArrayEquals("Column change queries were not equal ", expectedQueries, queries.toArray());
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/CreateTableQueryGeneratorTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/CreateTableQueryGeneratorTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/CreateTableQueryGeneratorTest.java
new file mode 100644
index 0000000..f5a9778
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/CreateTableQueryGeneratorTest.java
@@ -0,0 +1,101 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package org.apache.ambari.view.hive20.internal.query.generators;
+
+import com.google.common.base.Optional;
+import com.google.gson.Gson;
+import org.apache.ambari.view.hive20.internal.dto.TableMeta;
+import org.junit.Assert;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class CreateTableQueryGeneratorTest {
+  private static final Logger LOG = LoggerFactory.getLogger(CreateTableQueryGeneratorTest.class);
+
+  @Test
+  public void testGetQuery() throws Exception {
+    String json = "{\n" +
+      "\t\"id\": \"d1/t2\",\n" +
+      "\t\"database\": \"d1\",\n" +
+      "\t\"table\": \"t2\",\n" +
+      "\t\"columns\": [{\n" +
+      "\t\t\"name\": \"col_name1\",\n" +
+      "\t\t\"type\": \"string\",\n" +
+      "\t\t\"comment\": \"col_name1 comment\"\n" +
+      "\t}, {\n" +
+      "\t\t\"name\": \"col_name2\",\n" +
+      "\t\t\"type\": \"decimal(10,2)\",\n" +
+      "\t\t\"comment\": \"col_name2 comment\"\n" +
+      "\t}],\n" +
+      "\t\"ddl\": \"CREATE TABLE `t2`(\\n  `col_name1` string COMMENT \\u0027col_name1 comment\\u0027, \\n  `col_name2` decimal(10,2) COMMENT \\u0027col_name2 comment\\u0027)\\nCOMMENT \\u0027table t1 comment\\u0027\\nPARTITIONED BY ( \\n  `col_name3` string COMMENT \\u0027col_name3 comment\\u0027, \\n  `col_name4` char(1) COMMENT \\u0027col_name4 comment\\u0027)\\nCLUSTERED BY ( \\n  col_name1, \\n  col_name2) \\nSORTED BY ( \\n  col_name1 ASC, \\n  col_name2 DESC) \\nINTO 5 BUCKETS\\nROW FORMAT DELIMITED \\n  FIELDS TERMINATED BY \\u0027,\\u0027 \\nWITH SERDEPROPERTIES ( \\n  \\u0027escape.delim\\u0027\\u003d\\u0027\\\\\\\\\\u0027) \\nSTORED AS INPUTFORMAT \\n  \\u0027org.apache.hadoop.mapred.SequenceFileInputFormat\\u0027 \\nOUTPUTFORMAT \\n  \\u0027org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat\\u0027\\nLOCATION\\n  \\u0027hdfs://c6401.ambari.apache.org:8020/user/hive/tables/d1/t1\\u0027\\nTBLPROPERTIES (\\n  \\u0027NO_AUTO_COMPACTION\\u0027\\u003d\\u0027true\\u0027, 
 \\n  \\u0027immutable\\u0027\\u003d\\u0027false\\u0027, \\n  \\u0027orc.compress\\u0027\\u003d\\u0027SNAPPY\\u0027, \\n  \\u0027transient_lastDdlTime\\u0027\\u003d\\u00271481520077\\u0027)\\n\",\n" +
+      "\t\"partitionInfo\": {\n" +
+      "\t\t\"columns\": [{\n" +
+      "\t\t\t\"name\": \"col_name4\",\n" +
+      "\t\t\t\"type\": \"char(1)\",\n" +
+      "\t\t\t\"comment\": \"col_name4 comment\"\n" +
+      "\t\t}, {\n" +
+      "\t\t\t\"name\": \"col_name3\",\n" +
+      "\t\t\t\"type\": \"string\",\n" +
+      "\t\t\t\"comment\": \"col_name3 comment\"\n" +
+      "\t\t}]\n" +
+      "\t},\n" +
+      "\t\"detailedInfo\": {\n" +
+      "\t\t\"dbName\": \"d1\",\n" +
+      "\t\t\"owner\": \"admin\",\n" +
+      "\t\t\"createTime\": \"Mon Dec 12 05:21:17 UTC 2016\",\n" +
+      "\t\t\"lastAccessTime\": \"UNKNOWN\",\n" +
+      "\t\t\"retention\": \"0\",\n" +
+      "\t\t\"tableType\": \"MANAGED_TABLE\",\n" +
+      "\t\t\"location\": \"hdfs://c6401.ambari.apache.org:8020/user/hive/tables/d1/t1\",\n" +
+      "\t\t\"parameters\": {\n" +
+      "\t\t\t\"immutable\": \"false\",\n" +
+      "\t\t\t\"orc.compress\": \"SNAPPY\",\n" +
+      "\t\t\t\"transient_lastDdlTime\": \"1481520077\",\n" +
+      "\t\t\t\"NO_AUTO_COMPACTION\": \"true\",\n" +
+      "\t\t\t\"comment\": \"table t1 comment\",\n" +
+      "\t\t\t\"SORTBUCKETCOLSPREFIX\": \"TRUE\"\n" +
+      "\t\t}\n" +
+      "\t},\n" +
+      "\t\"storageInfo\": {\n" +
+      "\t\t\"serdeLibrary\": \"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe\",\n" +
+      "\t\t\"inputFormat\": \"org.apache.hadoop.mapred.SequenceFileInputFormat\",\n" +
+      "\t\t\"outputFormat\": \"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat\",\n" +
+      "\t\t\"compressed\": \"No\",\n" +
+      "\t\t\"numBuckets\": \"5\",\n" +
+      "\t\t\"bucketCols\": [\"col_name1\", \" col_name2\"],\n" +
+      "\t\t\"sortCols\": [{\n" +
+      "\t\t\t\"columnName\": \"col_name1\",\n" +
+      "\t\t\t\"order\": \"ASC\"\n" +
+      "\t\t}, {\n" +
+      "\t\t\t\"columnName\": \"col_name2\",\n" +
+      "\t\t\t\"order\": \"DESC\"\n" +
+      "\t\t}],\n" +
+      "\t\t\"parameters\": {\n" +
+      "\t\t\t\"escape.delim\": \"\\\\\\\\\",\n" +
+      "\t\t\t\"field.delim\": \",\",\n" +
+      "\t\t\t\"serialization.format\": \",\"\n" +
+      "\t\t}\n" +
+      "\t}\n" +
+      "}";
+    TableMeta tableMeta = new Gson().fromJson(json, TableMeta.class);
+    Optional<String> createQuery = new CreateTableQueryGenerator(tableMeta).getQuery();
+    LOG.info("createQuery : {}", createQuery);
+    Assert.assertTrue(createQuery.isPresent());
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/CSVParserTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/CSVParserTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/CSVParserTest.java
new file mode 100644
index 0000000..8f0ac47
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/CSVParserTest.java
@@ -0,0 +1,275 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.upload;
+
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.csv.commonscsv.CSVParser;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.Iterator;
+
+public class CSVParserTest {
+
+  /**
+   * no exception when creating the CSVParser with an empty stream
+   * @throws IOException
+   */
+  @Test
+  public void testEmptyStream() throws Exception {
+    String csv = "";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, new ParseOptions());
+      ) {
+      Assert.assertEquals("There should not be any rows.",false, jp.iterator().hasNext());
+    }
+  }
+
+  /**
+   * in the case of CSV, an empty (whitespace-only) line is still considered a row
+   * @throws IOException
+   */
+  @Test
+  public void testEmptyRow() throws Exception {
+    String csv = "       ";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, new ParseOptions());
+      ) {
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator should be Empty", true, iterator.hasNext());
+      Assert.assertArrayEquals("Row should not be empty",new Object[]{"       "},iterator.next().getRow());
+    }
+  }
+
+  @Test
+  public void testParse1Row() throws Exception {
+    String csv = "value1,c,10,10.1";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, new ParseOptions());
+      ) {
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator Empty!", true, iterator.hasNext());
+      Row row = iterator.next();
+      Row expected = new Row(new Object[]{"value1", "c", "10", "10.1"});
+      Assert.assertEquals("Row not equal!", expected, row);
+
+      Assert.assertEquals("Should report no more rows!", false, iterator.hasNext());
+    }
+  }
+
+  @Test
+  public void testParseMultipleRow() throws Exception {
+
+    String csv = "value1,c,10,10.1\n" +
+            "value2,c2,102,true";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, new ParseOptions());
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", new Row(new Object[]{"value1", "c", "10", "10.1"}), iterator.next());
+
+      Assert.assertEquals("Failed to detect 2nd row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 2nd row!", new Row(new Object[]{"value2", "c2", "102", Boolean.TRUE.toString()}), iterator.next());
+
+      Assert.assertEquals("Failed to detect end of rows!", false, iterator.hasNext());
+      Assert.assertEquals("Failed to detect end of rows 2nd time!", false, iterator.hasNext());
+    }
+  }
+
+
+  @Test
+  public void testQuotedEndline() throws Exception {
+
+    String csv = "\"row1-\ncol1\",1,1.1\n\"row2-\\\ncol1\",2,2.2\n";
+    ParseOptions po = new ParseOptions();
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Row row = new Row(new Object[]{"row1-\ncol1", "1", "1.1"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+
+      Row row2 = new Row(new Object[]{"row2-\\\ncol1", "2", "2.2"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row2, iterator.next());
+
+    }
+  }
+
+  @Test
+  public void testQuotedDoubleQuote() throws Exception {
+
+    String csv = "\"aaa\",\"b\"\"bb\",\"ccc\"";
+    ParseOptions po = new ParseOptions();
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Row row = new Row(new Object[]{"aaa", "b\"bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
+
+  @Test
+  public void testSpecialEscape() throws Exception {
+
+    String csv = "\"aaa\",\"b$\"bb\",\"ccc\"";
+    ParseOptions po = new ParseOptions();
+    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR,'$');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Row row = new Row(new Object[]{"aaa", "b\"bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
+
+  @Test
+  public void testSpecialEscapedEscape() throws Exception {
+
+    String csv = "aaa,b$$bb,ccc";
+    ParseOptions po = new ParseOptions();
+    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR,'$');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Row row = new Row(new Object[]{"aaa", "b$bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
+
+  @Test
+  public void test001Escape() throws Exception {
+
+    String csv = "aaa,b\001\"bb,ccc";
+    ParseOptions po = new ParseOptions();
+    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR,'\001');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+      Row row = new Row(new Object[]{"aaa", "b\"bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());    }
+  }
+
+  @Test
+  public void testSpecialQuote() throws Exception {
+
+    String csv = "\001aaa\001,\001b\001\001bb\001,\001ccc\001";
+    ParseOptions po = new ParseOptions();
+    po.setOption(ParseOptions.OPTIONS_CSV_QUOTE,'\001');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+      Row row = new Row(new Object[]{"aaa", "b\001bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
+
+  @Test
+  public void testSpaceAsDelimiterAndQuoted() throws Exception {
+
+    String csv = "aaa \"b bb\" ccc\naaa2 bbb2 \"c cc2\"";
+    ParseOptions po = new ParseOptions();
+//    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR,'\001');
+    po.setOption(ParseOptions.OPTIONS_CSV_DELIMITER,' ');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+      Row row = new Row(new Object[]{"aaa", "b bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+
+      Row row2 = new Row(new Object[]{"aaa2", "bbb2", "c cc2"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row2, iterator.next());
+    }
+  }
+
+  @Test
+  public void testFailedDelimiterEscaped() throws Exception {
+
+    String csv = "aaa,b\\,bb,ccc";
+    ParseOptions po = new ParseOptions();
+    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR,'\\');
+    po.setOption(ParseOptions.OPTIONS_CSV_DELIMITER,',');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+      Row row = new Row(new Object[]{"aaa", "b,bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserCSVTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserCSVTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserCSVTest.java
new file mode 100644
index 0000000..8006e91
--- /dev/null
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserCSVTest.java
@@ -0,0 +1,326 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.upload;
+
+import org.apache.ambari.view.hive20.client.ColumnDescription;
+import org.apache.ambari.view.hive20.client.ColumnDescriptionShort;
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.DataParser;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive20.resources.uploads.parsers.PreviewData;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+
+public class DataParserCSVTest {
+  @Test
+  public void testParsePreviewCSV() throws Exception {
+    String str = "1,a\n" +
+            "2,b\n" +
+            "3,c\n";
+
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+
+    try (
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions);
+    ){
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(2, pd.getPreviewRows().size()); // the first row is the header, so it is not returned among the preview rows
+      Assert.assertEquals(2, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("1", ColumnDescriptionShort.DataTypes.INT.toString(), 0),
+              new ColumnDescriptionImpl("a", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1)};
+
+      Object cols2[] = new Object[2];
+      cols2[0] = "2";
+      cols2[1] = "b";
+      Row row2 = new Row(cols2);
+
+      Object cols3[] = new Object[2];
+      cols3[0] = "3";
+      cols3[1] = "c";
+      Row row3 = new Row(cols3);
+
+      Row[] rows = { row2, row3};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    }
+  }
+
+  /**
+   * if the detected datatype does not hold for even one of the preview rows, the column should be assigned a broader datatype that does.
+   * but if the first row is the header, it should not be accounted for when detecting datatypes
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewDataTypeDetectionCSV() throws Exception {
+    String str = "1,a,10,k\n" +
+      "2,b,6,8\n" +
+      "2.2,b,7,9\n" +
+      "2,b,abc,1\n" +
+      "2,b,9,3\n" +
+      "2,b,8,5\n" +
+      "2,b,7,3\n" +
+      "2,b,6,3\n" +
+      "3,c,c,3\n";
+
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+    try(StringReader sr = new StringReader(str);
+      DataParser dp= new DataParser(sr, parseOptions)) {
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(4, pd.getHeader().size());
+      ColumnDescription[] cd = {
+        // as row 3 contains 2.2
+        new ColumnDescriptionImpl("1", ColumnDescriptionShort.DataTypes.DOUBLE.toString(), 0),
+        // as all are chars
+        new ColumnDescriptionImpl("a", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1),
+        // as row 4 contains abc
+        new ColumnDescriptionImpl("10", ColumnDescriptionShort.DataTypes.STRING.toString(), 2),
+        // although row 1 contains 'k', it is in the header and not counted when detecting the datatype
+        new ColumnDescriptionImpl("k", ColumnDescriptionShort.DataTypes.INT.toString(), 3)};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+    }
+  }
+
+  /**
+   * if the detected datatype does not hold for even one of the preview rows, the column should be assigned a broader datatype that does.
+   * but if the first row is the header, it should not be accounted for when detecting datatypes
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewDataTypeDetection2CSV() throws Exception {
+    String str = "1,a,10,k\n" +
+      "2,b,6,p\n" +
+      "2.2,b,7,9\n" +
+      "2,b,2.2,1\n" +
+      "2,b,9,3\n" +
+      "2,b,8,5\n" +
+      "2,b,7,3\n" +
+      "2,b,6,3\n" +
+      "3,c,c,3\n";
+
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+
+    try(StringReader sr = new StringReader(str);
+        DataParser dp = new DataParser(sr, parseOptions)) {
+
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(4, pd.getHeader().size());
+      ColumnDescription[] cd = {
+        // as row 3 contains 2.2
+        new ColumnDescriptionImpl("1", ColumnDescriptionShort.DataTypes.DOUBLE.toString(), 0),
+        // as all are chars
+        new ColumnDescriptionImpl("a", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1),
+        // some are int, char and some double .. nothing other than 'string' satisfies all the rows
+        new ColumnDescriptionImpl("10", ColumnDescriptionShort.DataTypes.STRING.toString(), 2),
+        // although row 1 contains 'k', it is in the header and not counted when detecting the datatype,
+        // but row 2 also has the char 'p', which is accounted for in datatype detection
+        new ColumnDescriptionImpl("k", ColumnDescriptionShort.DataTypes.CHAR.toString(), 3)};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+    }
+  }
+
+  /**
+   * A one-row CSV gives default column names, and the 1st row appears in the preview, if HEADER.PROVIDED_BY_USER is selected
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreview1RowCSV() throws Exception {
+    String str = "1,a\n" ;
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.PROVIDED_BY_USER.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions)
+    ) {
+
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(1, pd.getPreviewRows().size());
+      Assert.assertEquals(2, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("column1", ColumnDescriptionShort.DataTypes.INT.toString(), 0),
+        new ColumnDescriptionImpl("column2", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1)};
+
+      Object cols1[] = new Object[2];
+      cols1[0] = "1";
+      cols1[1] = "a";
+      Row row1 = new Row(cols1);
+
+      Row[] rows = {row1};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    }
+  }
+
+  /**
+   * A one-row CSV will throw an exception during preview if HEADER.FIRST_RECORD is selected, since the only row is consumed as the header.
+   * @throws IOException
+   */
+  @Test(expected = java.util.NoSuchElementException.class)
+  public void testParsePreview1RowCSVFirstRowHeader() throws Exception {
+    String str = "col1,col2\n" ;
+
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions)
+    ) {
+
+
+      PreviewData pd = dp.parsePreview();
+    }
+  }
+
+  /**
+   * More columns in a row => ignore the extra columns. The number of columns is decided by the first row.
+   * If another row contains more columns, the extra columns are ignored.
+   * Here the first row has 2 columns and the second row has 3, so the value 'x' is ignored.
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewCSVMoreColumns() throws Exception {
+    String str = "1,a\n" +
+            "2,b,x\n" +  // contains 3 cols, more number of columns
+            "3,c\n";
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+
+    try(
+        StringReader sr = new StringReader(str);
+        DataParser dp = new DataParser(sr, parseOptions)
+    ) {
+
+      PreviewData pd = dp.parsePreview();
+      Row row = new Row(new Object[]{"2","b"});
+
+      Assert.assertArrayEquals("Additional columns not properly handled.", row.getRow(),pd.getPreviewRows().get(0).getRow());
+    }
+  }
+
+  /**
+   * Fewer columns => treat missing values as null. The number of columns is decided by the first row of the table.
+   * If another row has fewer columns, the missing values are treated as null.
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewCSVLessColumns() throws Exception {
+    String str = "1,a\n" +
+            "2\n" +  // contains 1 col, less number of columns
+            "3,c\n";
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp =  new DataParser(sr, parseOptions)
+      ) {
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertEquals("Missing value not detected as null.",pd.getPreviewRows().get(1).getRow()[1],null);
+    }
+  }
+
+  /**
+   * Empty values are treated as empty strings.
+   * @throws IOException
+   */
+  @Test
+  public void testEmptyColumn() throws Exception {
+    String str = "1,a,x\n" +
+            "2,,y\n" +  // contains 1 col, less number of columns
+            "3,c,z\n";
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions)
+    ) {
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertEquals("Empty column not detected properly.",pd.getPreviewRows().get(0).getRow()[1],"");
+    }
+  }
+
+  /**
+   * Empty values, including trailing ones, are treated as empty strings.
+   * @throws IOException
+   */
+  @Test
+  public void testLastEmptyColumn() throws Exception {
+    String str = "1,a,x\n" +
+            "2,,\n" +  // contains 1 col, less number of columns
+            "3,c,z\n";
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions)
+    ) {
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertEquals("Empty column not detected properly.",pd.getPreviewRows().get(0).getRow()[1],"");
+      Assert.assertEquals("Empty column not detected properly.",pd.getPreviewRows().get(0).getRow()[2],"");
+    }
+  }
+}
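
For reference, the preview flow these tests exercise boils down to building a ParseOptions, wrapping the uploaded content in a Reader, and asking DataParser for a PreviewData. The fragment below is a minimal sketch, not part of the committed code: it reuses only the calls visible in the tests above, assumes the same imports as the test class, and is meant to run inside a method declared with "throws Exception".

    String csv = "id,name\n1,a\n2,b\n";

    ParseOptions parseOptions = new ParseOptions();
    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());

    try (StringReader reader = new StringReader(csv);
         DataParser parser = new DataParser(reader, parseOptions)) {
      PreviewData preview = parser.parsePreview();
      // The header carries the column names and the datatypes detected from the preview rows.
      for (ColumnDescription cd : preview.getHeader()) {
        System.out.println(cd);
      }
      // Preview rows exclude the first record when it is used as the header.
      for (Row row : preview.getPreviewRows()) {
        System.out.println(java.util.Arrays.toString(row.getRow()));
      }
    }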


[05/20] ambari git commit: AMBARI-19321 : Hive View 2.0 - Minimal view for Hive which includes new UI changes. Also made changes in poms as required (nitirajrathore)

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/components/table-advanced-settings.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/table-advanced-settings.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/table-advanced-settings.hbs
new file mode 100644
index 0000000..18f22f9
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/table-advanced-settings.hbs
@@ -0,0 +1,226 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="panel panel-info">
+  <div class="panel-heading">
+    <div class="panel-title">
+      {{fa-icon "cog" size="lg"}}
+      &nbsp;&nbsp;&nbsp;Settings
+    </div>
+  </div>
+  <div class="panel-body">
+    <div class="form-horizontal">
+      <div class="form-group">
+        <label class="col-md-2 control-label">Transactional</label>
+        <div class="col-md-4">
+          <label>
+            {{input type="checkbox" checked=settings.transactional}}
+          </label>
+        </div>
+      </div>
+
+      {{#if shouldAddBuckets}}
+        <div class="form-group">
+          <label class="col-md-2 control-label">Number of buckets</label>
+          <div class="col-md-6">
+            <div class="{{if hasNumBucketError 'has-error'}}">
+
+              {{input type="number" class="form-control" value=settings.numBuckets}}
+              {{#if hasNumBucketError}}
+                <span class="help-block">{{numBucketErrorText}}</span>
+              {{/if}}
+            </div>
+          </div>
+        </div>
+      {{/if}}
+    </div>
+  </div>
+
+</div>
+
+<div class="panel panel-info">
+  <div class="panel-heading">
+    <div class="panel-title">
+      <button class="btn btn-primary {{if showLocationInput 'active'}}" {{action "toggleLocation"}}>
+        {{fa-icon (if showLocationInput "minus" "plus")}}
+      </button>
+      &nbsp;&nbsp;&nbsp;Add Location
+    </div>
+  </div>
+  {{#if showLocationInput}}
+    <div class="panel-body">
+      <div class="row">
+        <div class="col-md-6">
+          {{input type="text" class="form-control" value=settings.location}}
+        </div>
+        <button class="btn btn-success" {{action "toggleDirectoryViewer"}}>Select HDFS Directory</button>
+      </div>
+      {{#if showDirectoryViewer}}
+        {{hdfs-viewer-modal
+          showSelectedPath=true
+          close="closeHdfsModal"
+          selected="hdfsPathSelected"
+        }}
+      {{/if}}
+    </div>
+  {{/if}}
+</div>
+
+<div class="panel panel-info">
+  <div class="panel-heading">
+    <div class="panel-title">
+      <button class="btn btn-primary {{if showFileFormatInput 'active'}}" {{action "toggleFileFormat"}}>
+        {{fa-icon (if showFileFormatInput "minus" "plus")}}
+      </button>
+      &nbsp;&nbsp;&nbsp;Add File Format
+    </div>
+  </div>
+  {{#if showFileFormatInput}}
+    <div class="panel-body">
+      <div class="row">
+        <div class="col-md-6">
+          {{#power-select
+            selected=selectedFileFormat
+            options=fileFormats
+            searchField="name"
+            searchPlaceholder="Enter data type"
+            onchange=(action "fileFormatSelected") as |parameter|}}
+            {{parameter.name}}
+          {{/power-select}}
+        </div>
+      </div>
+      {{#if customFileFormat}}
+        <div class="row fileformat-custom-row">
+          <div class="col-md-6 form-horizontal">
+            <div class="form-group">
+              <label class="col-md-3 control-label">Input Format</label>
+              <div class="col-md-9">
+                {{input type="text" class="form-control" value=settings.fileFormat.inputFormat
+                        placeholder="Input format class"}}
+              </div>
+            </div>
+            <div class="form-group">
+              <label class="col-md-3 control-label">Output Format</label>
+              <div class="col-md-9">
+                {{input type="text" class="form-control" value=settings.fileFormat.outputFormat
+                        placeholder="Output format class"}}
+              </div>
+            </div>
+          </div>
+        </div>
+      {{/if}}
+
+    </div>
+  {{/if}}
+</div>
+
+<div class="panel panel-info">
+  <div class="panel-heading">
+    <div class="panel-title">
+      <button class="btn btn-primary {{if showRowFormatInput 'active'}}" {{action "toggleRowFormat"}}>
+        {{fa-icon (if showRowFormatInput "minus" "plus")}}
+      </button>
+      &nbsp;&nbsp;&nbsp;Add Row Format
+    </div>
+  </div>
+  {{#if showRowFormatInput}}
+    <div class="panel-body rowformat-custom-row">
+      <div class="row">
+        <div class="col-md-6 form-horizontal">
+          <div class="form-group">
+            <label class="col-md-4 control-label">Fields Terminated By</label>
+            <div class="col-md-7">
+              {{#power-select
+                selected=selectedFieldTerminator
+                options=terminationChars
+                searchField="name"
+                searchPlaceholder="Enter terminator character"
+                onchange=(action "fieldTerminatorSelected") as |parameter|}}
+                {{parameter.name}}{{#if parameter.description}} - {{parameter.description}}{{/if}}
+              {{/power-select}}
+            </div>
+            <div class="col-md-1">
+              <a class="text-danger" {{action "clearFieldTerminator"}}>{{fa-icon "times" size="lg"}}</a>
+            </div>
+          </div>
+        </div>
+      </div>
+      <div class="row">
+        <div class="col-md-6 form-horizontal">
+          <div class="form-group">
+            <label class="col-md-4 control-label">Lines Terminated By</label>
+            <div class="col-md-7">
+              {{#power-select
+                selected=selectedLinesTerminator
+                options=terminationChars
+                searchField="name"
+                searchPlaceholder="Enter terminator character"
+                onchange=(action "linesTerminatorSelected") as |parameter|}}
+                {{parameter.name}}{{#if parameter.description}} - {{parameter.description}}{{/if}}
+              {{/power-select}}
+            </div>
+            <div class="col-md-1">
+              <a class="text-danger" {{action "clearLinesTerminator"}}>{{fa-icon "times" size="lg"}}</a>
+            </div>
+          </div>
+        </div>
+      </div>
+      <div class="row">
+        <div class="col-md-6 form-horizontal">
+          <div class="form-group">
+            <label class="col-md-4 control-label">Null Defined As</label>
+            <div class="col-md-7">
+              {{#power-select
+                selected=selectedNullDefinition
+                options=terminationChars
+                searchField="name"
+                searchPlaceholder="Enter terminator character"
+                onchange=(action "nullDefinedAsSelected") as |parameter|}}
+                {{parameter.name}}{{#if parameter.description}} - {{parameter.description}}{{/if}}
+              {{/power-select}}
+            </div>
+            <div class="col-md-1">
+              <a class="text-danger" {{action "clearNullDefinition"}}>{{fa-icon "times" size="lg"}}</a>
+            </div>
+          </div>
+        </div>
+      </div>
+      <div class="row">
+        <div class="col-md-6 form-horizontal">
+          <div class="form-group">
+            <label class="col-md-4 control-label">Escape Defined As</label>
+            <div class="col-md-7">
+              {{#power-select
+                selected=selectedEscapeDefinition
+                options=terminationChars
+                searchField="name"
+                searchPlaceholder="Enter terminator chanracter"
+                onchange=(action "escapeDefinedAsSelected") as |parameter|}}
+                {{parameter.name}}{{#if parameter.description}} - {{parameter.description}}{{/if}}
+              {{/power-select}}
+            </div>
+            <div class="col-md-1">
+              <a class="text-danger" {{action "clearEscapeDefinition"}}>{{fa-icon "times" size="lg"}}</a>
+            </div>
+          </div>
+        </div>
+      </div>
+    </div>
+  {{/if}}
+</div>
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/components/table-columns.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/table-columns.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/table-columns.hbs
new file mode 100644
index 0000000..f5fc547
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/table-columns.hbs
@@ -0,0 +1,42 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<table class="table table-bordered table-hover">
+  <thead>
+  <tr>
+    <th width="30%">COLUMN NAME</th>
+    <th width="25%">DATA TYPE</th>
+    <th width="25%">SIZE</th>
+    <th width="5%" class="text-center">ADVANCED</th>
+    <th width="15%" class="text-center">ACTION</th>
+  </tr>
+  </thead>
+  <tbody>
+  {{#each columns as |column|}}
+    {{column-item column=column
+                  columnDeleted="columnDeleted"
+                  columnUpdated="columnUpdated"
+    }}
+  {{/each}}
+  <tr class="new-settings text-center">
+    <td colspan="5">
+      <a {{action "addNewColumn"}}>{{fa-icon "plus"}} Add New Column</a>
+    </td>
+  </tr>
+  </tbody>
+</table>

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/components/table-properties.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/table-properties.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/table-properties.hbs
new file mode 100644
index 0000000..0eaab5e
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/table-properties.hbs
@@ -0,0 +1,40 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<table class="table table-bordered table-hover">
+  <thead>
+  <tr>
+    <th width="40%">KEY</th>
+    <th width="40%">VALUE</th>
+    <th width="20%" class="text-center">ACTION</th>
+  </tr>
+  </thead>
+  <tbody>
+  {{#each properties as |property|}}
+    {{property-item property=property
+                  propertyItemDeleted="itemDeleted"
+                  propertyItemUpdated="itemUpdated"
+    }}
+  {{/each}}
+  <tr class="new-settings text-center">
+    <td colspan="3">
+      <a {{action "addNewRow"}}>{{fa-icon "plus"}} Add New Column</a>
+    </td>
+  </tr>
+  </tbody>
+</table>

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/components/tabs-item.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/tabs-item.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/tabs-item.hbs
new file mode 100644
index 0000000..361a359
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/tabs-item.hbs
@@ -0,0 +1,24 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+{{#unless shouldTransition}}
+  <a {{action 'selected'}}>{{fa-icon tab.faIcon}}&nbsp;&nbsp;{{tab.label}}</a>
+{{else}}
+  {{#link-to tab.link eventName='click'}}{{fa-icon tab.faIcon}}&nbsp;&nbsp;{{tab.label}}{{/link-to}}
+{{/unless}}
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/components/tabs-pane.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/tabs-pane.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/tabs-pane.hbs
new file mode 100644
index 0000000..b61d090
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/tabs-pane.hbs
@@ -0,0 +1,21 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+{{#each tabs as |tab|}}
+  {{yield tab}}
+{{/each}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/components/top-application-bar.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/top-application-bar.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/top-application-bar.hbs
new file mode 100644
index 0000000..f930be9
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/top-application-bar.hbs
@@ -0,0 +1,25 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<h3 class="clearfix">
+  <strong>HIVE</strong>
+  <span class="pull-right">
+    <button class="btn btn-sm btn-success">{{fa-icon "plus"}} NEW JOB</button>
+    <button class="btn btn-sm btn-success">{{fa-icon "plus"}} NEW TABLE</button>
+  </span>
+</h3>

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/databases.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/databases.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/databases.hbs
new file mode 100644
index 0000000..2be061f
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/databases.hbs
@@ -0,0 +1,39 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+{{database-search-bar databases=model selected="databaseSelected"}}
+<div class="hv-dropdown database-dropdown">
+  <div class="dropdown">
+    <button class="btn btn-default dropdown-toggle" type="button" id="dropdownMenu1121" data-toggle="dropdown" aria-haspopup="true" aria-expanded="true">
+      {{fa-icon "navicon"}}
+    </button>
+    <ul class="dropdown-menu dropdown-menu-right" aria-labelledby="dropdownMenu1121">
+      <li><a href="#" {{action "dropDatabase"}} class="text-uppercase">{{fa-icon "trash"}} drop database</a></li>
+      <li><a href="#" {{action "createTable"}} class="text-uppercase">{{fa-icon "plus"}} create table</a></li>
+    </ul>
+  </div>
+</div>
+
+{{#if databaseNotEmpty}}
+  {{database-not-empty name=databaseName close="notEmptyDialogClosed"}}
+{{/if}}
+
+{{#if confirmDropDatabase}}
+  {{database-drop-confirm name=databaseName yes="databaseDropConfirmed" no="databaseDropDeclined"}}
+{{/if}}
+{{outlet}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database.hbs
new file mode 100644
index 0000000..941bb77
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database.hbs
@@ -0,0 +1,19 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+{{outlet}}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables.hbs
new file mode 100644
index 0000000..1f98b97
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables.hbs
@@ -0,0 +1,45 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="dipayan123 clearfix">
+  <div class="col-md-3">
+    <div class="row">
+      <div class="hv-dropdown tables-dropdown">
+        {{#link-to "databases.database.tables.new" class="btn btn-success"}}{{fa-icon "plus"}}{{/link-to}}
+      </div>
+
+      {{#list-filter header="tables" items=model
+      placeholder="Search"
+      as |filteredItems|}}
+        {{#list-group class="table-list" items=filteredItems as |item|}}
+          {{list-item item=item itemClicked="tableSelected"}}
+        {{/list-group}}
+      {{/list-filter}}
+
+    </div>
+  </div>
+  <div class="col-md-9">
+    <div class="row">
+      {{outlet}}
+    </div>
+  </div>
+
+</div>
+
+
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/new.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/new.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/new.hbs
new file mode 100644
index 0000000..39b7a9e
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/new.hbs
@@ -0,0 +1,42 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="col-md-12 table-info">
+  {{#if showCreateTableModal}}
+    {{#modal-dialog
+      translucentOverlay=true
+      container-class="modal-dialog modal-sm"}}
+      <div class="modal-content">
+        <div class="modal-header text-danger">
+          <p class="modal-title">{{fa-icon "plus"}}&nbsp;&nbsp;&nbsp; Create table</p>
+        </div>
+        <div class="modal-body text-center text-primary">
+          <p>{{createTableMessage}}</p>
+        </div>
+      </div><!-- /.modal-content -->
+    {{/modal-dialog}}
+  {{/if}}
+  <div class="table-header row">
+    <p class="text-uppercase">table<strong>&nbsp;&nbsp;>&nbsp;&nbsp;create table</strong></p>
+  </div>
+  <div class="table-body">
+    {{create-table tabs=tabs
+                   cancel="cancel"
+                   create="create"}}
+  </div>
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table.hbs
new file mode 100644
index 0000000..258f687
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table.hbs
@@ -0,0 +1,42 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="col-md-12 table-info">
+  <div class="table-header row">
+    <p class="text-uppercase">table<strong>&nbsp;&nbsp;>&nbsp;&nbsp;{{model.table}}</strong></p>
+    <div class="hv-dropdown tables-dropdown">
+      <div class="dropdown">
+        <button class="btn btn-default dropdown-toggle" type="button" id="dropdownMenu1" data-toggle="dropdown" aria-haspopup="true" aria-expanded="true">
+          {{fa-icon "navicon"}}
+        </button>
+        <ul class="dropdown-menu dropdown-menu-right" aria-labelledby="dropdownMenu1">
+          <li><a href="#" class="text-uppercase">{{fa-icon "edit"}} Edit</a></li>
+          <li><a href="#" class="text-uppercase">{{fa-icon "trash"}} Delete</a></li>
+        </ul>
+      </div>
+    </div>
+  </div>
+  <div class="table-body">
+    {{#tabs-pane tabs=tabs inverse= true as |tab|}}
+      {{tabs-item tab=tab tabs=tabs}}
+    {{/tabs-pane}}
+    {{outlet}}
+  </div>
+
+
+</div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/columns.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/columns.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/columns.hbs
new file mode 100644
index 0000000..f7f01f5
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/columns.hbs
@@ -0,0 +1,38 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="row">
+  <table class="table table-bordered table-hover">
+    <thead>
+    <tr>
+      <th>COLUMN NAME</th>
+      <th>COLUMN TYPE</th>
+      <th>COMMENT</th>
+    </tr>
+    </thead>
+    <tbody>
+    {{#each table.columns as |column|}}
+      <tr>
+        <th>{{column.name}}</th>
+        <td>{{column.type}}</td>
+        <td>{{column.comment}}</td>
+      </tr>
+    {{/each}}
+    </tbody>
+  </table>
+</div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/ddl.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/ddl.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/ddl.hbs
new file mode 100644
index 0000000..cbbcdd0
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/ddl.hbs
@@ -0,0 +1,27 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="row">
+{{!--
+  {{ivy-codemirror value=table.ddl
+      options=(hash lineNumbers=true
+      readOnly=true
+      mode="sql"
+      theme="solarized")}}
+--}}
+</div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/details.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/details.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/details.hbs
new file mode 100644
index 0000000..1577fec
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/details.hbs
@@ -0,0 +1,65 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="row">
+  <table class="table table-bordered table-hover">
+    <thead>
+    <tr>
+      <th>INFORMATION</th>
+      <th>VALUE</th>
+    </tr>
+    </thead>
+    {{#with table.detailedInfo as |info|}}
+      <tbody>
+        <tr>
+          <th>Database Name</th>
+          <td>{{info.dbName}}</td>
+        </tr>
+        <tr>
+          <th>Owner</th>
+          <td>{{info.owner}}</td>
+        </tr>
+        <tr>
+          <th>Create Time</th>
+          <td>{{info.createTime}}</td>
+        </tr>
+        <tr>
+          <th>Last Access Time</th>
+          <td>{{info.lastAccessTime}}</td>
+        </tr>
+        <tr>
+          <th>Retention</th>
+          <td>{{info.retention}}</td>
+        </tr>
+        <tr>
+          <th>Table Type</th>
+          <td>{{info.tableType}}</td>
+        </tr>
+        <tr>
+          <th>Location</th>
+          <td>{{info.location}}</td>
+        </tr>
+        <tr>
+          <th>Parameters</th>
+          <td><pre class="table-info-json">{{toJson info.parameters}}</pre></td>
+        </tr>
+      </tbody>
+    {{/with}}
+
+  </table>
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/partitions.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/partitions.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/partitions.hbs
new file mode 100644
index 0000000..f66a0bc
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/partitions.hbs
@@ -0,0 +1,38 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="row">
+  <table class="table table-bordered table-hover">
+    <thead>
+    <tr>
+      <th>COLUMN NAME</th>
+      <th>COLUMN TYPE</th>
+      <th>COMMENT</th>
+    </tr>
+    </thead>
+    <tbody>
+    {{#each model.partitionInfo.columns as |column|}}
+      <tr>
+        <th>{{column.name}}</th>
+        <td>{{column.type}}</td>
+        <td>{{column.comment}}</td>
+      </tr>
+    {{/each}}
+    </tbody>
+  </table>
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/stats.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/stats.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/stats.hbs
new file mode 100644
index 0000000..6671b8b
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/stats.hbs
@@ -0,0 +1,17 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/storage.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/storage.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/storage.hbs
new file mode 100644
index 0000000..6e76741
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/storage.hbs
@@ -0,0 +1,64 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="row">
+  <table class="table table-bordered table-hover">
+    <thead>
+    <tr>
+      <th>INFORMATION</th>
+      <th>VALUE</th>
+    </tr>
+    </thead>
+    {{#with table.storageInfo as |info|}}
+      <tbody>
+      <tr>
+        <th>SerDe Library</th>
+        <td>{{info.serdeLibrary}}</td>
+      </tr>
+      <tr>
+        <th>Input Format</th>
+        <td>{{info.inputFormat}}</td>
+      </tr>
+      <tr>
+        <th>Output Format</th>
+        <td>{{info.outputFormat}}</td>
+      </tr>
+      <tr>
+        <th>Compressed</th>
+        <td>{{info.compressed}}</td>
+      </tr>
+      <tr>
+        <th>Number of Buckets</th>
+        <td>{{info.numBuckets}}</td>
+      </tr>
+      <tr>
+        <th>Bucket Columns</th>
+        <td>{{info.bucketCols}}</td>
+      </tr>
+      <tr>
+        <th>Sort Columns</th>
+        <td>{{info.sortCols}}</td>
+      </tr>
+      <tr>
+        <th>Parameters</th>
+        <td><pre class="table-info-json">{{toJson info.parameters}}</pre></td>
+      </tr>
+      </tbody>
+    {{/with}}
+  </table>
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/view.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/view.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/view.hbs
new file mode 100644
index 0000000..d703191
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/table/view.hbs
@@ -0,0 +1,40 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="row">
+  <table class="table table-bordered table-hover">
+    <thead>
+    <tr>
+      <th>INFORMATION</th>
+      <th>VALUE</th>
+    </tr>
+    </thead>
+    {{#with table.viewInfo as |info|}}
+      <tbody>
+        <tr>
+          <th>Original Text</th>
+          <td>{{info.originalText}}</td>
+        </tr>
+        <tr>
+          <th>Extended Text</th>
+          <td>{{info.extendedText}}</td>
+        </tr>
+      </tbody>
+    {{/with}}
+  </table>
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/jobs.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/jobs.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/jobs.hbs
new file mode 100644
index 0000000..4382b7a
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/jobs.hbs
@@ -0,0 +1,23 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+{{jobs-browser jobs=model
+               startTime=startTimeText
+               endTime=endTimeText
+               filterChanged="dateFilterChanged"
+}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/messages.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/messages.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/messages.hbs
new file mode 100644
index 0000000..b856c20
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/messages.hbs
@@ -0,0 +1,54 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="row">
+  <div class="col-md-12 messages-header">
+    <div class="col-md-1">
+      {{!--#link-to "files" (query-params path=currentBrowserPath) class="btn btn-primary"}}{{fa-icon "arrow-left"}} Browser{{/link-to--}}
+    </div>
+    <div class="col-md-2 col-md-offset-4 text-center">
+      <span class="messages-title">{{fa-icon "comment"}} Messages</span>
+    </div>
+  </div>
+</div>
+<div class="row">
+  <div class={{if isExpanded "col-md-12" "col-md-4"}}>
+    <div class="list-group">
+      {{#each model as |message|}}
+        {{#link-to 'messages.message' message class=(alert-message-context-class message.type "list-group-item list-group-item-")}}
+          <h4 class="list-group-item-heading wrap-message">
+            {{#fa-stack}}
+              {{fa-icon "circle-thin" stack=2}}
+              {{fa-icon (alert-message-icon-class message.type) stack=1}}
+            {{/fa-stack}}
+            {{{message.message}}}</h4>
+          <p class="list-group-item-text wrap-message">{{shorten-text message.responseMessage shortenLength}}</p>
+        {{/link-to}}
+      {{else}}
+        No messages present
+      {{/each}}
+    </div>
+
+  </div>
+  {{#unless isExpanded}}
+    <div class="col-md-8">
+      {{outlet}}
+    </div>
+  {{/unless}}
+</div>
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/messages/message.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/messages/message.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/messages/message.hbs
new file mode 100644
index 0000000..0c69d58
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/messages/message.hbs
@@ -0,0 +1,48 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class={{alert-message-context-class model.type "panel panel-"}}>
+  <div class="panel-heading wrap-message">
+    {{#fa-stack size="lg"}}
+      {{fa-icon "circle-thin" stack=2}}
+      {{fa-icon (alert-message-icon-class model.type) stack=1}}
+    {{/fa-stack}}
+    {{{model.message}}}
+  </div>
+  {{#if displayBody}}
+    <div class="panel-body">
+      {{#if showStatus}}
+        <p><strong>Server status:</strong> {{model.status}}</p>
+        <hr/>
+      {{/if}}
+      {{#if model.responseMessage}}
+        {{alert-message-display title="Server Message:"
+        value=model.responseMessage
+        shorten=true
+        length=200}}
+        <hr/>
+      {{/if}}
+      {{#if model.trace}}
+        {{alert-message-display title="Error trace:"
+        value=model.trace
+        shorten=true
+        length=500}}
+      {{/if}}
+    </div>
+  {{/if}}
+</div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/query.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/query.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/query.hbs
new file mode 100644
index 0000000..d24c7d0
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/query.hbs
@@ -0,0 +1,88 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+{{multiple-database-search-bar databases=model selected=selected xyz="xyz" }}
+
+<div class="clearfix">
+  <div class="col-md-9">
+    <div class="row query-editor">
+      {{query-editor query=currentQuery }}
+    </div>
+    <div class="row query-editor-controls">
+      <button class="btn btn-success" {{action "executeQuery" }}>{{fa-icon "check"}} Execute</button>
+    </div>
+  </div>
+
+  <div class="col-md-3">
+    <div class="row">
+    <div class="panel-group database-panel" id="accordion" role="tablist" aria-multiselectable="true">
+
+      {{#each selectedTablesModels as |tableModel|}}
+        <div class="panel panel-default">
+
+          <div class="panel-heading" role="tab">
+            <h4 class="panel-title">
+              <a role="button" data-toggle="collapse" data-parent="#accordion"
+                 href="javascript:void(0)" {{action 'showTables' tableModel.dbname }} aria-expanded="true"
+                 aria-controls={{tableModel.dbname}}>
+                {{ tableModel.dbname }}
+              </a>
+              <small class="pull-right">Tables({{tableModel.tables.length}})</small>
+            </h4>
+          </div>
+
+          <div id={{ tableModel.dbname }} class="panel-collapse collapse {{if singleDbModel 'in'}}" role="tabpanel"
+          aria-labelledby="headingOne">
+          <div class="panel-body">
+            {{#if tableModel.tables.length }}
+              {{#list-filter header="tables" items=tableModel.tables
+              placeholder="Search Tables"
+              as |filteredItems|}}
+                {{#list-group class="table-list" items=filteredItems as |item|}}
+                  {{list-item item=item itemClicked="tableSelected"}}
+                {{/list-group}}
+              {{/list-filter}}
+            {{else}}
+              <div class="empty">No Table found.</div>
+            {{/if}}
+          </div>
+        </div>
+
+      </div>
+      {{/each}}
+
+    </div>
+  </div>
+</div>
+</div>
+
+
+<div class="clearfix query-editor-results">
+  {{#if isQueryRunning}}
+    <div style="left:37%; position: relative;">
+      <img src="http://www.bba-reman.com/images/fbloader.gif" height="22" width="32" />
+    </div>
+  {{/if}}
+  {{query-result-table queryResult=queryResult showPreviousButton=showPreviousButton goNextPage='goNextPage' goPrevPage='goPrevPage' }}
+</div>
+
+{{#if databaseNotEmpty}}
+  {{database-not-empty name=databaseName close="notEmptyDialogClosed"}}
+{{/if}}
+
+{{outlet}}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/savedqueries.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/savedqueries.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/savedqueries.hbs
new file mode 100644
index 0000000..b776fd2
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/savedqueries.hbs
@@ -0,0 +1,19 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+{{outlet}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/settings.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/settings.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/settings.hbs
new file mode 100644
index 0000000..047336f
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/settings.hbs
@@ -0,0 +1,32 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="col-md-12 table-info">
+  <div class="row table-body">
+    {{#setting-list settings=model newSettings="addNewSettings" as |setting|}}
+      {{#unless setting.editMode}}
+        {{setting-item setting=setting editAction="editAction" deleteAction="deleteAction"}}
+      {{else}}
+        {{edit-setting-item setting=setting
+                            hiveParameters=hiveParameters
+                            updateAction="updateAction"
+                            cancelAction="cancelAction"}}
+      {{/unless}}
+    {{/setting-list}}
+  </div>
+</div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/tables.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/tables.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/tables.hbs
new file mode 100644
index 0000000..b776fd2
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/tables.hbs
@@ -0,0 +1,19 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+{{outlet}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/udfs.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/udfs.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/udfs.hbs
new file mode 100644
index 0000000..b776fd2
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/udfs.hbs
@@ -0,0 +1,19 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+{{outlet}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/utils/hdfs-picker-config.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/utils/hdfs-picker-config.js b/contrib/views/hive20/src/main/resources/ui/app/utils/hdfs-picker-config.js
new file mode 100644
index 0000000..f4ed7db
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/utils/hdfs-picker-config.js
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import ViewerConfig from 'hdfs-directory-viewer/utils/viewer-config';
+
+export default ViewerConfig.extend({
+  store: null,
+
+  showOnlyDirectories: true,
+
+  expandIcon: 'fa fa-chevron-right',
+  collapseIcon: 'fa fa-chevron-down',
+
+  listDirectoryUrl(pathParams) {
+    return `${this.get('store').adapterFor('hdfs-viewer').get('namespace')}?${pathParams}`;
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/bower.json
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/bower.json b/contrib/views/hive20/src/main/resources/ui/bower.json
new file mode 100644
index 0000000..4eadee7
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/bower.json
@@ -0,0 +1,11 @@
+{
+  "name": "ui",
+  "dependencies": {
+    "ember": "~2.7.0",
+    "ember-cli-shims": "~0.1.1",
+    "ember-qunit-notifications": "0.1.0",
+    "font-awesome": "~4.5.0",
+    "codemirror": "~5.15.0",
+    "bootstrap-treeview": "~1.2.0"
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/config/environment.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/config/environment.js b/contrib/views/hive20/src/main/resources/ui/config/environment.js
new file mode 100644
index 0000000..9efd491
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/config/environment.js
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* jshint node: true */
+
+module.exports = function(environment) {
+  var ENV = {
+    modulePrefix: 'ui',
+    environment: environment,
+    rootURL: '/',
+    locationType: 'hash',
+    EmberENV: {
+      FEATURES: {
+        // Here you can enable experimental features on an ember canary build
+        // e.g. 'with-controller': true
+      }
+    },
+
+    APP: {
+      // Here you can pass flags/options to your application instance
+      // when it is created
+    }
+  };
+
+  if (environment === 'development') {
+    // ENV.APP.LOG_RESOLVER = true;
+    // ENV.APP.LOG_ACTIVE_GENERATION = true;
+    // ENV.APP.LOG_TRANSITIONS = true;
+    // ENV.APP.LOG_TRANSITIONS_INTERNAL = true;
+    // ENV.APP.LOG_VIEW_LOOKUPS = true;
+  }
+
+  if (environment === 'test') {
+    // Testem prefers this...
+    ENV.locationType = 'none';
+
+    // keep test console output quieter
+    ENV.APP.LOG_ACTIVE_GENERATION = false;
+    ENV.APP.LOG_VIEW_LOOKUPS = false;
+
+    ENV.APP.rootElement = '#ember-testing';
+  }
+
+  if (environment === 'production') {
+
+  }
+
+  return ENV;
+};

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/ember-cli-build.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/ember-cli-build.js b/contrib/views/hive20/src/main/resources/ui/ember-cli-build.js
new file mode 100644
index 0000000..e41c8e8
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/ember-cli-build.js
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*jshint node:true*/
+/* global require, module */
+var EmberApp = require('ember-cli/lib/broccoli/ember-app');
+
+module.exports = function(defaults) {
+  var app = new EmberApp(defaults, {
+    // Add options here
+    bootstrap: {
+      // List of Bootstrap plugins to use
+      plugins: ['dropdown']
+    },
+    fingerprint: {
+      enabled: false
+    },
+    codemirror: {
+      modes: ['sql'],
+      themes: ['solarized']
+    }
+  });
+
+  // Use `app.import` to add additional libraries to the generated
+  // output files.
+  //
+  // If you need to use different assets in different
+  // environments, specify an object as the first parameter. That
+  // object's keys should be the environment name and the values
+  // should be the asset to use in that environment.
+  //
+  // If the library that you are including contains AMD or ES6
+  // modules that you would like to import into your application
+  // please specify an object with the list of modules as keys
+  // along with the exports of each module as its value.
+
+
+   app.import('bower_components/codemirror/lib/codemirror.js');
+   app.import('bower_components/codemirror/addon/hint/sql-hint.js');
+   app.import('bower_components/codemirror/addon/hint/show-hint.js');
+   app.import('bower_components/codemirror/lib/codemirror.css');
+   app.import('bower_components/codemirror/addon/hint/show-hint.css');
+
+  /*
+  app.import('vendor/codemirror/codemirror-min.js');
+  app.import('vendor/codemirror/sql-hint.js');
+  app.import('vendor/codemirror/show-hint.js');
+  app.import('vendor/codemirror/codemirror.css');
+  app.import('vendor/codemirror/show-hint.css');
+  */
+
+  return app.toTree();
+};
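
The comment block above also mentions passing an environment-keyed object to `app.import`. A minimal, hypothetical sketch of that form (the asset paths are illustrative, not part of this patch):

  // ember-cli picks the asset matching the current build environment.
  app.import({
    development: 'bower_components/some-lib/dist/some-lib.js',
    production: 'bower_components/some-lib/dist/some-lib.min.js'
  });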

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/package.json
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/package.json b/contrib/views/hive20/src/main/resources/ui/package.json
new file mode 100644
index 0000000..ed7a299
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/package.json
@@ -0,0 +1,64 @@
+{
+  "name": "ui",
+  "version": "0.0.0",
+  "description": "Hive View UI",
+  "private": true,
+  "directories": {
+    "doc": "doc",
+    "test": "tests"
+  },
+  "scripts": {
+    "build": "ember build",
+    "start": "ember server",
+    "test": "ember test",
+    "preinstall": "chmod +x node/npm/bin/node-gyp-bin/node-gyp",
+    "postinstall": "bash node/with_new_path.sh node node_modules/.bin/bower --allow-root install"
+  },
+  "repository": "",
+  "engines": {
+    "node": ">= 0.10.0"
+  },
+  "author": "",
+  "license": "MIT",
+  "devDependencies": {
+    "bootstrap-daterangepicker": "2.1.24",
+    "bower": "^1.7.9",
+    "broccoli-asset-rev": "^2.4.2",
+    "ember-ajax": "^2.0.1",
+    "ember-cli": "2.7.0",
+    "ember-cli-app-version": "^1.0.0",
+    "ember-cli-babel": "^5.1.6",
+    "ember-cli-daterangepicker": "0.3.0",
+    "ember-cli-dependency-checker": "^1.2.0",
+    "ember-cli-flash": "1.4.0",
+    "ember-cli-htmlbars": "^1.0.3",
+    "ember-cli-htmlbars-inline-precompile": "^0.3.1",
+    "ember-cli-inject-live-reload": "^1.4.0",
+    "ember-cli-jshint": "^1.0.0",
+    "ember-cli-moment-shim": "3.0.1",
+    "ember-cli-qunit": "^2.0.0",
+    "ember-cli-release": "^0.2.9",
+    "ember-cli-sass": "5.6.0",
+    "ember-cli-sri": "^2.1.0",
+    "ember-cli-test-loader": "^1.1.0",
+    "ember-cli-uglify": "^1.2.0",
+    "ember-composable-helpers": "1.1.2",
+    "ember-data": "^2.7.0",
+    "ember-export-application-global": "^1.0.5",
+    "ember-font-awesome": "2.2.0",
+    "ember-light-table": "1.8.0",
+    "ember-load-initializers": "^0.5.1",
+    "ember-modal-dialog": "0.9.0",
+    "ember-moment": "7.2.0",
+    "ember-power-select": "1.0.0-beta.31",
+    "ember-resolver": "^2.0.3",
+    "ember-responsive": "2.0.0",
+    "ember-sass-bootstrap": "0.1.2",
+    "loader.js": "^4.0.1"
+  },
+  "ember-addon": {
+    "paths": [
+      "ext/hdfs-directory-viewer"
+    ]
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-300.eot
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-300.eot b/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-300.eot
new file mode 100644
index 0000000..826acfd
Binary files /dev/null and b/contrib/views/hive20/src/main/resources/ui/public/fonts/roboto/roboto-v15-latin-300.eot differ


[06/20] ambari git commit: AMBARI-19321 : Hive View 2.0 - Minimal view for Hive which includes new UI changes. Also made changes in poms as required (nitirajrathore)

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/services/alert-messages.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/services/alert-messages.js b/contrib/views/hive20/src/main/resources/ui/app/services/alert-messages.js
new file mode 100644
index 0000000..ed4cff1
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/services/alert-messages.js
@@ -0,0 +1,144 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+
+/**
+  Shows an alert flash message and also creates `alert` objects in the store. To skip
+  creating the `alert` objects in the store, pass `options.flashOnly` as `true`. The
+  options used for creating the `alert` objects are:
+  ```
+    options.message: message field returned by the API server.
+    options.status : Status of the XHR request if the message is a response to an XHR request. Defaults to -1.
+    options.error: Detailed error to be displayed.
+  ```
+  Options required for ember-cli-flash can also be passed in the alertOptions to override the
+  default behaviour.
+*/
+export default Ember.Service.extend({
+  flashMessages: Ember.inject.service('flash-messages'),
+  store: Ember.inject.service('store'),
+  alertsChanged: false,
+
+  currentUnreadMessages: function() {
+   return this.get('store').peekAll('alert').filter((entry) => {
+     return entry.get('read') === false;
+   });
+  },
+
+  setUnreadMessagesToRead: function() {
+    this.currentUnreadMessages().forEach((entry) => {
+      entry.set('read', true);
+    });
+    this.toggleProperty('alertsChanged');
+  },
+
+  currentMessagesCount: Ember.computed('alertsChanged', function() {
+    return this.currentUnreadMessages().get('length');
+  }),
+
+  success: function(message, options = {}, alertOptions = {}) {
+    this._processMessage('success', message, options, alertOptions);
+  },
+
+  warn: function(message, options = {}, alertOptions = {}) {
+    this._processMessage('warn', message, options, alertOptions);
+  },
+
+  info: function(message, options = {}, alertOptions = {}) {
+    this._processMessage('info', message, options, alertOptions);
+  },
+
+  danger: function(message, options = {}, alertOptions = {}) {
+    this._processMessage('danger', message, options, alertOptions);
+  },
+
+  clearMessages: function() {
+    this.get('flashMessages').clearMessages();
+  },
+
+  _processMessage: function(type, message, options, alertOptions) {
+    this._clearMessagesIfRequired(alertOptions);
+    let alertRecord = this._createAlert(message, type, options, alertOptions);
+    if(alertRecord) {
+      this.toggleProperty('alertsChanged');
+      message = this._addDetailsToMessage(message, alertRecord);
+    }
+    switch (type) {
+      case 'success':
+        this.get('flashMessages').success(message, this._getOptions(alertOptions));
+        break;
+      case 'warn':
+        this.get('flashMessages').warning(message, this._getOptions(alertOptions));
+        break;
+      case 'info':
+        this.get('flashMessages').info(message, this._getOptions(alertOptions));
+        break;
+      case 'danger':
+        this.get('flashMessages').danger(message, this._getOptions(alertOptions));
+    }
+  },
+
+  _addDetailsToMessage: function(message, record) {
+    let id = record.get('id');
+    let suffix = `<a href="#/messages/${id}">(details)</a>`;
+    return message + "  " + suffix;
+  },
+
+  _createAlert: function(message, type, options, alertOptions) {
+    var data = {};
+    data.message = message;
+    data.responseMessage = options.message || '';
+    data.id = this._getNextAlertId();
+    data.type = type;
+    data.status = options.status || -1;
+    data.trace = this._getDetailedError(options.trace);
+    delete options.status;
+    delete options.error;
+
+    if(alertOptions.flashOnly === true) {
+      return;
+    }
+    return this.get('store').createRecord('alert', data);
+  },
+
+  _getDetailedError: function(error) {
+    return error || '';
+  },
+
+  _getOptions: function(options = {}) {
+    var defaultOptions = {
+      priority: 100,
+      showProgress: true,
+      timeout: 6000
+    };
+    return Ember.merge(defaultOptions, options);
+  },
+
+  _getNextAlertId: function() {
+    return this.get('store').peekAll('alert').get('length') + 1;
+  },
+
+  _clearMessagesIfRequired: function(options = {}) {
+    var stackMessages = options.stackMessages || false;
+    if(stackMessages !== true) {
+      this.clearMessages();
+    }
+  }
+});
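
A minimal usage sketch for the alert-messages service above, assuming it is injected into a route or component as `alertMessages` (the route and the error fields shown are illustrative, not part of this patch):

  import Ember from 'ember';

  export default Ember.Route.extend({
    alertMessages: Ember.inject.service('alert-messages'),

    actions: {
      saveSettings() {
        // flashOnly skips creating an `alert` record in the store.
        this.get('alertMessages').success('Settings saved', {}, { flashOnly: true });
      },

      reportFailure(err) {
        // Creates an `alert` record (message, status, trace) and flashes it with a details link.
        this.get('alertMessages').danger('Failed to save settings', {
          message: err.message,
          status: err.status,
          trace: err.stack
        });
      }
    }
  });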

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/services/create-table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/services/create-table.js b/contrib/views/hive20/src/main/resources/ui/app/services/create-table.js
new file mode 100644
index 0000000..135f96a
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/services/create-table.js
@@ -0,0 +1,182 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Service.extend({
+  store: Ember.inject.service(),
+
+  submitCreateTable(database, settings) {
+    let detailedInfo = this._getDetailedInfo(settings);
+    let storageInfo = this._getStorageInfo(settings);
+    let columns = this._getColumns(settings);
+    let partitionColumns = this._getPartitionColumns(settings);
+
+    let tableInfo = Ember.Object.create({
+      database: database,
+      table: settings.name,
+      columns: columns,
+      partitionInfo: { columns: partitionColumns },
+      detailedInfo: detailedInfo,
+      storageInfo: storageInfo
+    });
+    return new Ember.RSVP.Promise((resolve, reject) => {
+      this.get('store').adapterFor('table').createTable(tableInfo).then((data) => {
+        this.get('store').pushPayload({job: data});
+        resolve(this.get('store').peekRecord('job', data.id));
+      }, (err) => {
+        reject(err);
+      });
+    });
+  },
+
+  waitForJobToComplete(jobId, after) {
+    return new Ember.RSVP.Promise((resolve, reject) => {
+      Ember.run.later(() => {
+        this.get('store').findRecord('job', jobId, {reload: true})
+          .then((job) => {
+            let status = job.get('status').toLowerCase();
+            if (status === 'succeeded') {
+              this._fetchDummyResult(jobId);
+              resolve();
+            } else if (status === 'error') {
+              reject();
+            } else {
+              resolve(this.waitForJobToComplete(jobId, after));
+            }
+          }, (error) => {
+            reject(error);
+          });
+      }, after);
+    });
+  },
+
+  _fetchDummyResult(jobId) {
+    this.get('store').adapterFor('job').fetchResult(jobId);
+  },
+
+  _getDetailedInfo(settings) {
+    let detailedInfo = {};
+    detailedInfo['parameters'] = this._getTableProperties(settings);
+
+    if (!Ember.isEmpty(settings.settings.location)) {
+      detailedInfo['location'] = settings.settings.location;
+    }
+
+    return detailedInfo;
+
+  },
+
+  _getStorageInfo(settings) {
+    const storageSettings = settings.settings;
+    let storageInfo = {};
+    let parameters = {};
+
+
+
+    if (!(Ember.isEmpty(storageSettings.fileFormat) || Ember.isEmpty(storageSettings.fileFormat.type))) {
+      storageInfo.fileFormat = storageSettings.fileFormat.type;
+      if (storageSettings.fileFormat.type === 'CUSTOM Serde') {
+        storageInfo.inputFormat = storageSettings.inputFormat;
+        storageInfo.outputFormat = storageSettings.outputFormat;
+      }
+    }
+
+    if (!Ember.isEmpty(storageSettings.rowFormat)) {
+      let addParameters = false;
+      if (!Ember.isEmpty(storageSettings.rowFormat.fieldTerminatedBy)) {
+        parameters['field.delim'] = String.fromCharCode(storageSettings.rowFormat.fieldTerminatedBy.id);
+        addParameters = true;
+      }
+
+      if (!Ember.isEmpty(storageSettings.rowFormat.linesTerminatedBy)) {
+        parameters['line.delim'] = String.fromCharCode(storageSettings.rowFormat.linesTerminatedBy.id);
+        addParameters = true;
+      }
+
+      if (!Ember.isEmpty(storageSettings.rowFormat.nullDefinedAs)) {
+        parameters['serialization.null.format'] = String.fromCharCode(storageSettings.rowFormat.nullDefinedAs.id);
+        addParameters = true;
+      }
+
+      if (!Ember.isEmpty(storageSettings.rowFormat.escapeDefinedAs)) {
+        parameters['escape.delim'] = String.fromCharCode(storageSettings.rowFormat.escapeDefinedAs.id);
+        addParameters = true;
+      }
+
+      if (addParameters) {
+        storageInfo.parameters = parameters;
+      }
+    }
+
+    if (!Ember.isEmpty(settings.settings.numBuckets)) {
+      storageInfo['numBuckets'] = settings.settings.numBuckets;
+    }
+
+    let clusteredColumnNames =  settings.columns.filterBy('isClustered', true).map((column) => {
+      return column.get('name');
+    });
+
+    if (clusteredColumnNames.length > 0) {
+      storageInfo['bucketCols'] = clusteredColumnNames;
+    }
+
+    return storageInfo;
+  },
+
+  _getColumns(settings) {
+    return settings.columns.filterBy('isPartitioned', false).map((column) => {
+      return {
+        name: column.get('name'),
+        type: column.get('type.label'),
+        comment: column.get('comment'),
+        precision: column.get('precision'),
+        scale: column.get('scale')
+      }
+    });
+  },
+
+  _getPartitionColumns(settings) {
+    return settings.columns.filterBy('isPartitioned', true).map((column) => {
+      return {
+        name: column.get('name'),
+        type: column.get('type.label'),
+        comment: column.get('comment'),
+        precision: column.get('precision'),
+        scale: column.get('scale')
+      }
+    });
+  },
+
+  _getTableProperties(settings) {
+    let properties = {};
+    settings.properties.forEach(function (property) {
+      properties[property.key] = property.value;
+    });
+
+    if (settings.settings.transactional) {
+      if (Ember.isEmpty(properties['transactional'])) {
+        properties['transactional'] = true;
+      }
+    }
+
+    return properties;
+  }
+
+
+});
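
A rough sketch of how a caller might chain the two service methods above (the injected property name `createTableService`, the database name, and the 5000 ms poll interval are assumptions for illustration):

  // Assuming: createTableService: Ember.inject.service('create-table')
  this.get('createTableService')
    .submitCreateTable('default', tableSettings)
    .then((job) => {
      // Poll the job status every 5 seconds until it succeeds or errors out.
      return this.get('createTableService').waitForJobToComplete(job.get('id'), 5000);
    })
    .then(() => {
      // Table creation finished successfully.
    })
    .catch((err) => {
      // Job submission failed or the job ended in the 'error' state.
    });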

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js b/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js
new file mode 100644
index 0000000..4928e5c
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Service.extend({
+  store: Ember.inject.service(),
+  getQuery(jobId) {
+    return this.get('store').findRecord('job', jobId).then((job) => {
+      return this.get('store').findRecord('file', job.get('queryFile'));
+    })
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/services/keep-alive.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/services/keep-alive.js b/contrib/views/hive20/src/main/resources/ui/app/services/keep-alive.js
new file mode 100644
index 0000000..6bb12fb
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/services/keep-alive.js
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Service.extend({
+  store: Ember.inject.service(),
+  initialize: function() {
+    this.schedulePing();
+  },
+
+  schedulePing() {
+    this.get('store').adapterFor('ping').ping();
+    Ember.run.later(this.schedulePing.bind(this), 60000);
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/services/query.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/services/query.js b/contrib/views/hive20/src/main/resources/ui/app/services/query.js
new file mode 100644
index 0000000..5a2f62d
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/services/query.js
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Service.extend({
+
+  store: Ember.inject.service(),
+
+  createJob(payload){
+    return new Ember.RSVP.Promise((resolve, reject) => {
+      this.get('store').adapterFor('query').createJob(payload).then(function(data) {
+        resolve(data);
+      }, function(err) {
+        reject(err);
+      });
+    });
+  },
+  getJob(jobId, dateSubmitted, firstCall){
+    let self = this;
+    return new Ember.RSVP.Promise((resolve, reject) => {
+      this.get('store').adapterFor('query').getJob(jobId, dateSubmitted, firstCall).then(function(data) {
+        resolve(data);
+      }, function(err) {
+          reject(err);
+      });
+    });
+  }
+
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss b/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
new file mode 100644
index 0000000..812ae92
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
@@ -0,0 +1,774 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+@import 'bootstrap-overrides';
+@import 'fonts';
+
+@import "ember-modal-dialog/ember-modal-structure";
+@import "ember-modal-dialog/ember-modal-appearance";
+
+@import "power-select-overrides";
+
+
+.ember-modal-dialog {
+  box-shadow: 0 0 5px grey;
+  padding: 0;
+}
+.ember-modal-overlay.translucent {
+  z-index: 110;
+  background-color: rgba(#808080, .77);
+}
+
+
+.main-wrapper {
+  padding: 5px 0;
+  background-color: #fff;
+  height: 100%;
+}
+
+.fa-2 {
+  font-size: 2em;
+}
+
+$database-search-background: lighten($body-bg, 10%);
+$database-search-text-color: $gray;
+.database-search, .multiple-database-search {
+  color:  $database-search-text-color;
+  padding: 10px;
+  border: 1px solid darken($database-search-background, 25%);
+  background-color: $database-search-background;
+  border-collapse: collapse;
+  margin-bottom: 10px;
+
+  p {
+    margin-bottom: 5px;
+  }
+
+  .btn {
+    color: $database-search-text-color;
+  }
+
+  .ember-power-select-trigger{
+    border: 1px solid darken($database-search-background, 25%);
+    line-height: 3.2em;
+  }
+
+  .ember-power-select-trigger-multiple-input {
+    font-size: 18px;
+    padding: 0 16px;
+  }
+}
+
+.ember-power-select-option[aria-current="true"]{
+  color: #555;
+  background-color: #f5f5f5;
+}
+
+.ember-power-select-multiple-option {
+  border-radius: 3px;
+  border: 1px solid #ddd;
+  color: #555;
+  background-color: #e4e4e4;
+  padding: 0 4px;
+  display: inline-block;
+  line-height: 2;
+  float: left;
+  margin: 5px 0 2px 3px;
+}
+
+.ember-power-select-dropdown.ember-basic-dropdown-content--below, .ember-power-select-dropdown.ember-basic-dropdown-content--in-place {
+  border-top: none;
+  border-bottom: 1px solid #64A5ED;
+  border-top-left-radius: 0;
+  border-top-right-radius: 0;
+  border: 1px solid darken($database-search-background, 25%);
+}
+
+
+.database-search-drawer {
+  position: absolute;
+  max-height: 300px;
+  width: calc(100% - 157px);
+  z-index: 1000;
+  background-color: $database-search-background;
+  border: 1px solid darken($database-search-background, 25%);
+  overflow-y: auto;
+  border-top: none;
+
+  .list-group {
+    margin: 0;
+    font-size: 1.2em;
+    .list-group-item {
+      border-color: darken($database-search-background, 10%);
+      i {
+        &.noshow {
+          visibility: hidden;
+        }
+      }
+    }
+  }
+}
+
+$list-filter-header-background: lighten($body-bg, 10%);
+$list-filter-text-color: $gray;
+
+.list-filter {
+  color:  $list-filter-text-color;
+  .list-header {
+    p {
+      margin: 0;
+    }
+    padding: 15px 10px;
+    background-color: $list-filter-header-background;
+    border: 1px solid darken($list-filter-header-background, 15%);
+  }
+  .am-view-list-item {
+    &.active {
+      color: inherit;
+      background-color: #FEF5DD;
+      border: 1px solid darken($list-filter-header-background, 15%);
+    }
+  }
+}
+
+.table-list {
+  .list-group-item {
+    background-color: lighten($body-bg, 10%);
+  }
+}
+
+.search-bar {
+  padding: 0 10px;
+  background-color: darken($list-filter-header-background, 10%);
+  position: relative;
+  input {
+    width: calc(100% - 2.5em);
+    height: 2.8em;
+    background-color: transparent;
+    border: none;
+    margin-right: 10px;
+    outline: none;
+  }
+
+  a {
+    color: inherit;
+  }
+}
+
+$table-info-background: lighten($body-bg, 10%);
+.table-info {
+
+  background-color: $body-bg;
+  .table-header {
+    border: 1px solid darken($table-info-background, 15%);
+    p {
+      margin: 0;
+    }
+    padding: 15px 15px;
+    background-color: $table-info-background;
+  }
+  .table-body {
+    table {
+      background-color: $table-info-background;
+      margin: 0;
+      border: none;
+
+      thead {
+        background-color: $gray-dark;
+        color: #fff;
+      }
+    }
+  }
+}
+
+.table-name-input {
+  padding-top: 10px;
+  margin-bottom: -1px;
+  border-bottom: 1px solid darken($table-info-background, 25%);
+}
+
+pre {
+  &.table-info-json {
+    border: none;
+    background-color: inherit;
+  }
+}
+
+.dipayan {
+  .CodeMirror {
+    height: 100vh;
+  }
+}
+
+.dipayan123 {
+  background-color: lighten($body-bg, 5%);
+  height: calc(100vh - 180px);
+  overflow-y: scroll;
+}
+
+.hv-dropdown {
+  position: absolute;
+  .dropdown-menu {
+    margin-top: -1px;
+    padding: 0;
+    min-width: 200px;
+    a {
+      line-height: 1.5em;
+      font-size: 1em;
+      padding: 5px 15px 5px 15px;
+      &:hover {
+        background-color: #FEF5DD;
+      }
+      i {
+        padding-right: 10px;
+      }
+    }
+  }
+
+  &.tables-dropdown {
+    z-index: 100;
+    top: 8px;
+    right: 15px;
+  }
+
+  &.database-dropdown {
+    top: 18px;
+    right: 30px;
+  }
+}
+
+.new-settings {
+  background-color: #FEF5DD;
+  a {
+    text-decoration: none;
+    display: block;
+    width: 100%
+  }
+}
+
+.create-table-inner {
+  //min-height: 400px;
+}
+
+.create-table-controls {
+  padding-top: 15px;
+  padding-bottom: 15px;
+}
+
+.column-precision {
+  width: 100%;
+  padding-top: 5px;
+  input {
+    width: 50%;
+    //padding-right: 5px;
+  }
+}
+
+.query-editor {
+  border: 1px solid darken($database-search-background, 25%);
+  margin-right: 0;
+}
+
+.query-editor-controls {
+  padding-top: 15px;
+  padding-bottom: 15px;
+}
+
+.database-panel{
+
+  .panel-heading {
+    border: 1px solid #ddd;
+  }
+  .panel-default{
+    border: 0;
+    .panel-body{
+      padding: 0;
+    }
+  }
+
+  .list-header{
+    display: none;
+  }
+
+  .empty {
+    border: 1px solid #ddd;
+    border-top: none;
+    padding: 10px 15px;
+  }
+}
+
+.hdfs-viewer {
+  .modal-header {
+    p {
+      margin-top: 10px;
+      margin-bottom: 0;
+    }
+  }
+  .hdfs-viewer-wrap {
+    max-height: 300px;
+    border: 1px solid darken($table-info-background, 15%);
+    overflow-y: scroll;
+  }
+}
+
+.create-table-advanced-wrap {
+  padding: 10px;
+}
+
+.fileformat-custom-row {
+  margin-top: 10px;
+  border-top: 1px solid darken($table-info-background, 15%);
+  padding-top: 10px;
+}
+
+.rowformat-custom-row {
+  a {
+    i {
+      margin-top: 10px;
+    }
+  }
+}
+
+.alert {
+  &.create-table-error {
+    margin: 0;
+  }
+}
+
+
+.query-result-table{
+
+  .spinner,
+  .tip {
+    text-align: center
+  }
+
+  .spinner {
+    margin: 15px auto;
+    width: 50px;
+    height: 50px;
+    font-size: 10px
+  }
+
+  .spinner>div {
+    background-color: #dd6a58;
+    height: 100%;
+    width: 4px;
+    display: inline-block;
+    animation: sk-stretchdelay 1.2s infinite ease-in-out
+  }
+
+  .spinner .rect2 {
+    animation-delay: -1.1s
+  }
+
+  .spinner .rect3 {
+    animation-delay: -1s
+  }
+
+  .spinner .rect4 {
+    animation-delay: -.9s
+  }
+
+  .spinner .rect5 {
+    animation-delay: -.8s
+  }
+
+  @keyframes sk-stretchdelay {
+    0%,
+    100%,
+    40% {
+      transform: scaleY(.4);
+      -webkit-transform: scaleY(.4)
+    }
+    20% {
+      transform: scaleY(1);
+      -webkit-transform: scaleY(1)
+    }
+  }
+
+  .ember-light-table {
+    width: 95%;
+    margin: 0 auto;
+    border-collapse: collapse
+  }
+
+  .ember-light-table .multi-select {
+    -webkit-touch-callout: none;
+    -webkit-user-select: none;
+    -moz-user-select: none;
+    -ms-user-select: none;
+    user-select: none
+  }
+
+  .ember-light-table tfoot .lt-column {
+    border-top: 1px solid #DADADA
+  }
+
+  .ember-light-table thead .lt-column {
+    border-bottom: 1px solid #DADADA
+  }
+
+  .ember-light-table tfoot th.is-dragging,
+  .ember-light-table thead th.is-dragging {
+    opacity: .75;
+    background: #eee
+  }
+
+  .ember-light-table tfoot th.is-drag-target.drag-right,
+  .ember-light-table thead th.is-drag-target.drag-right {
+    border-right: 1px dotted #DADADA
+  }
+
+  .ember-light-table tfoot th.is-drag-target.drag-left,
+  .ember-light-table thead th.is-drag-target.drag-left {
+    border-left: 1px dotted #DADADA
+  }
+
+  .ember-light-table .lt-column {
+    font-weight: 200;
+    font-size: 12px;
+    padding: 10px
+  }
+
+  .ember-light-table .lt-column .lt-sort-icon {
+    width: 15px
+  }
+
+  .ember-light-table .lt-column.lt-group-column {
+    border: none!important;
+    padding-bottom: 10px
+  }
+
+  .ember-light-table .lt-column .lt-column-resizer {
+    border-right: 1px dashed #ccc;
+    border-left: 1px dashed #ccc
+  }
+
+  .ember-light-table .lt-row {
+    height: 50px
+  }
+
+  .ember-light-table .lt-row.is-selected {
+    background-color: #DEDEDE!important
+  }
+
+  .ember-light-table .lt-row:not(.is-selected):hover {
+    background-color: #F5F4F4!important
+  }
+
+  .ember-light-table .lt-row:last-of-type td {
+    border-bottom-width: 0
+  }
+
+  .ember-light-table .lt-row.lt-expanded-row:hover,
+  .ember-light-table .lt-row.lt-no-data:hover {
+    background-color: transparent!important
+  }
+
+  .ember-light-table .lt-row.lt-expanded-row td,
+  .ember-light-table .lt-row.lt-no-data td {
+    padding: 15px
+  }
+
+  .ember-light-table .lt-row td {
+    border-color: #DADADA;
+    border-width: 0 0 1px;
+    border-style: solid;
+    font-size: 13px;
+    padding: 0 10px
+  }
+
+  tfoot tr>td {
+    border-top: 1px solid #DADADA;
+    padding: 10px 10px 0;
+    font-size: 13px
+  }
+
+  tfoot form {
+    display: -ms-flexbox;
+    display: flex;
+    -ms-flex-pack: justify;
+    justify-content: space-between
+  }
+
+  body,
+  html {
+    min-height: 100%;
+    min-width: 100%;
+    background-color: #F3F3F3;
+    height: 100%
+  }
+
+  label {
+    font-weight: 600
+  }
+
+  .navbar.navbar-default {
+    background-color: #fff;
+    margin: 0
+  }
+
+  .navbar.navbar-default .navbar-brand {
+    color: #797979;
+    font-size: 16px;
+    font-weight: 400
+  }
+
+  .navbar.navbar-default .navbar-brand img {
+    height: 32px;
+    display: inline-block;
+    margin-top: -15px
+  }
+
+  .navbar.navbar-default .navbar-brand span {
+    font-size: 12px;
+    vertical-align: super;
+    text-transform: uppercase;
+    color: #444
+  }
+
+  .navbar.navbar-default .navbar-nav>li>a {
+    font-size: 14px;
+    font-weight: 200
+  }
+
+  .navbar.navbar-default .navbar-nav>li>a.github {
+    font-size: 24px
+  }
+
+  .navbar.navbar-default .navbar-nav>li>a:focus,
+  .navbar.navbar-default .navbar-nav>li>a:hover {
+    color: #dd6a58
+  }
+
+  .navbar.navbar-default .navbar-nav>.active>a,
+  .navbar.navbar-default .navbar-nav>.active>a:focus,
+  .navbar.navbar-default .navbar-nav>.active>a:hover {
+    color: #dd6a58;
+    background-color: transparent
+  }
+
+  .panel {
+    width: 75%;
+    margin: 2.5% auto;
+    position: relative
+  }
+
+  .panel .table-actions {
+    position: absolute;
+    left: -41px;
+    width: 40px;
+    background-color: #fff;
+    border-bottom-left-radius: 5px;
+    border-top-left-radius: 5px;
+    border: 1px solid #ccc;
+    border-right-width: 0;
+    top: 35%;
+    cursor: pointer
+  }
+
+  .panel .table-actions .table-action {
+    font-size: 20px;
+    text-align: center;
+    display: block;
+    margin: 10px;
+    color: #191919;
+    opacity: .8
+  }
+
+  .panel .table-actions .table-action:active,
+  .panel .table-actions .table-action:hover {
+    opacity: 1
+  }
+
+  .panel .table-actions .table-action.delete {
+    color: #dd6a58
+  }
+
+  .panel .panel-heading {
+    background-color: #dd6a58;
+    padding: 0
+  }
+
+  .panel .panel-heading a {
+    color: #F9F9F9;
+    text-decoration: none
+  }
+
+  .panel .panel-heading a .panel-title {
+    font-weight: 200;
+    padding: 15px
+  }
+
+  .panel .code-snippet {
+    background-color: #f8f8f8;
+    max-height: 500px;
+    overflow: auto
+  }
+
+  .panel .code-snippet .nav-tabs li>a {
+    color: #8A8A8A;
+    border-radius: 2px 2px 0 0;
+    font-size: 12px
+  }
+
+  .panel .code-snippet .nav-tabs li.active>a {
+    background-color: #f8f8f8;
+    color: #696969
+  }
+
+  .panel .code-snippet .tab-content>.active {
+    padding: 15px 15px 0
+  }
+
+  .panel .code-snippet pre {
+    border: none;
+    margin: 0;
+    padding: 0
+  }
+
+  .table-container {
+    overflow-y: auto
+  }
+
+  .table-container.fixed-header {
+    overflow-y: hidden;
+    margin-bottom: 15px
+  }
+
+  .user-actions a {
+    color: #dd6a58;
+    padding-right: 10px;
+    font-size: 18px;
+    width: 30px;
+    text-decoration: none;
+    cursor: pointer;
+    padding-top: 10px
+  }
+
+  .user-actions a:active,
+  .user-actions a:focus,
+  .user-actions a:hover {
+    color: #d95743
+  }
+
+  .user-avatar {
+    border-radius: 50%;
+    border: 1px solid #ccc
+  }
+
+  .row-toggle {
+    color: #dd6a58;
+    cursor: pointer
+  }
+
+  .row-toggle:active,
+  .row-toggle:focus,
+  .row-toggle:hover {
+    color: #d95743
+  }
+
+  .tip {
+    font-size: 13px;
+    color: #4E4E4E
+  }
+
+  .tip .icon-info {
+    color: #dd6a58;
+    margin-right: 3px
+  }
+
+  form .form-group input,
+  form .form-group select {
+    outline: 0;
+    display: block;
+    width: 100%;
+    padding: 0 15px;
+    border: 1px solid #d9d9d9;
+    color: #6E6E6E;
+    font-family: Roboto, sans-serif;
+    -webkit-box-sizing: border-box;
+    box-sizing: border-box;
+    font-size: 14px;
+    font-weight: 400;
+    -webkit-font-smoothing: antialiased;
+    -moz-osx-font-smoothing: grayscale;
+    transition: all .3s linear 0s;
+    box-shadow: none
+  }
+
+  form .form-group input:focus,
+  form .form-group select:focus {
+    color: #333;
+    border-color: #b9b9b9;
+    outline: 0;
+    box-shadow: none;
+    -webkit-box-shadow: none
+  }
+
+}
+
+.flash-messages-wrap {
+  position: absolute;
+  width: 40%;
+  min-width: 575px;
+  left: 50%;
+  margin-left: -20%;
+  z-index: 1000;
+}
+
+.flash-messages {
+  border-radius: $border-radius-large;
+  .alert-icon {
+    float: left;
+    margin-right: 15px;
+  }
+
+  .alert-message-wrap {
+    display: table;
+    min-height: 56px;
+    .alert-message {
+      display: table-cell;
+      vertical-align: middle;
+    }
+  }
+
+}
+
+.jobs-status {
+  border-top: 1px solid darken($body-bg, 10%);
+  border-bottom: 1px solid darken($body-bg, 10%);
+  .strip {
+    margin-top: 25px;
+  }
+}
+
+.jobs-table {
+  margin-top: 10px;
+}
+
+
+
+
+
+
+
+
+
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/styles/bootstrap-overrides.scss
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/styles/bootstrap-overrides.scss b/contrib/views/hive20/src/main/resources/ui/app/styles/bootstrap-overrides.scss
new file mode 100644
index 0000000..ee9d373
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/styles/bootstrap-overrides.scss
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+$font-family-sans-serif: 'Roboto', "Helvetica Neue", Helvetica, Arial, sans-serif;
+
+$body-bg: #e1e1e0;
+$border-radius-base:        3px;
+$border-radius-large:       4px;
+$border-radius-small:       2px;
+
+
+// nav-tabs
+$nav-link-padding:                          10px 20px !default;
+$nav-tabs-border-color:                     darken($body-bg, 15%);
+$nav-tabs-link-hover-border-color:          darken($body-bg, 15%);
+$nav-tabs-active-link-hover-bg:             #fff;
+$nav-tabs-active-link-hover-border-color:   darken($body-bg, 15%);
+
+@import 'bootstrap';
+
+.nav-tabs {
+  &.inverse {
+    li.active {
+       a {
+         background-color: $gray-dark;
+         color: #fff;
+       }
+    }
+  }
+  li {
+    a {
+      font-weight: bold;
+      color: darken($body-bg, 50%);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/styles/fonts.scss
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/styles/fonts.scss b/contrib/views/hive20/src/main/resources/ui/app/styles/fonts.scss
new file mode 100644
index 0000000..c22e38b
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/styles/fonts.scss
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* roboto-regular - latin */
+@font-face {
+  font-family: 'Roboto';
+  font-style: normal;
+  font-weight: 400;
+  src: url('../fonts/roboto/roboto-v15-latin-regular.eot'); /* IE9 Compat Modes */
+  src: local('Roboto'), local('Roboto-Regular'),
+  url('../fonts/roboto/roboto-v15-latin-regular.eot?#iefix') format('embedded-opentype'), /* IE6-IE8 */
+  url('../fonts/roboto/roboto-v15-latin-regular.woff2') format('woff2'), /* Super Modern Browsers */
+  url('../fonts/roboto/roboto-v15-latin-regular.woff') format('woff'), /* Modern Browsers */
+  url('../fonts/roboto/roboto-v15-latin-regular.ttf') format('truetype'), /* Safari, Android, iOS */
+  url('../fonts/roboto/roboto-v15-latin-regular.svg#Roboto') format('svg'); /* Legacy iOS */
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/styles/power-select-overrides.scss
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/styles/power-select-overrides.scss b/contrib/views/hive20/src/main/resources/ui/app/styles/power-select-overrides.scss
new file mode 100644
index 0000000..5a52887
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/styles/power-select-overrides.scss
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+$ember-power-select-border-color: #64A5ED;
+$ember-power-select-border-radius: 0;
+$ember-power-select-line-height: 2.25;
+
+
+@import 'ember-power-select';

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/application.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/application.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/application.hbs
new file mode 100644
index 0000000..7d6259c
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/application.hbs
@@ -0,0 +1,42 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="container-fluid">
+  <div class="flash-messages-wrap">
+    {{#each flashMessages.queue as |flash|}}
+      {{alert-message flash=flash}}
+    {{/each}}
+  </div>
+  <div class="row">
+    <div class="col-md-12">
+      {{top-application-bar}}
+      {{#tabs-pane tabs=tabs as |tab|}}
+        {{tabs-item tab=tab tabs=tabs}}
+      {{/tabs-pane}}
+    </div>
+  </div>
+  <div class="row">
+    <div class="main-wrapper clearfix">
+      <div class="col-md-12">
+        {{outlet}}
+      </div>
+
+    </div>
+  </div>
+
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/components/.gitkeep
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/.gitkeep b/contrib/views/hive20/src/main/resources/ui/app/templates/components/.gitkeep
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/components/alert-message-display.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/alert-message-display.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/alert-message-display.hbs
new file mode 100644
index 0000000..8dfa913
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/alert-message-display.hbs
@@ -0,0 +1,34 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+
+<p><strong>{{{title}}}</strong></p>
+<div>
+  <pre class="prettyprint">
+    <small>
+      {{shortenedValue}}
+      {{#if shorten}}
+        {{#unless expanded}}
+          <a href="#" {{action "toggleExpanded"}}>(more...)</a>
+        {{else}}
+          <a href="#" {{action "toggleExpanded"}}>(less...)</a>
+        {{/unless}}
+      {{/if}}
+    </small>
+  </pre>
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/components/alert-message.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/alert-message.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/alert-message.hbs
new file mode 100644
index 0000000..ce8d941
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/alert-message.hbs
@@ -0,0 +1,34 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class={{alert-message-context-class flash.type "clearfix alert alert-dismissible alert-"}}>
+  <button type="button" class="close" {{action "closeAlert"}}><span aria-hidden="true">&times;</span></button>
+  <div class="alert-icon wrap-message">
+    {{#fa-stack size=2}}
+      {{fa-icon "circle-thin" stack=2}}
+      {{fa-icon (alert-message-icon-class flash.type) stack=1}}
+    {{/fa-stack}}
+  </div>
+  <div class="alert-message-wrap wrap-message">
+    <div class="alert-message">
+      {{{flash.message}}}
+    </div>
+  </div>
+
+</div>
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/components/column-item.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/column-item.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/column-item.hbs
new file mode 100644
index 0000000..96cf5ab
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/column-item.hbs
@@ -0,0 +1,124 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<td>
+  <div class="{{if column.nameError 'has-error'}}">
+    {{input type="text" class="form-control" value=column.name disabled=(not column.editing)}}
+    {{#if column.nameError}}
+      <span class="help-block">{{column.nameError.error}}</span>
+    {{/if}}
+  </div>
+
+</td>
+<td>
+  {{#power-select
+    disabled=notEditing
+    selected=column.type
+    options=datatypes
+    searchField="label"
+    searchPlaceholder="Enter data type"
+    onchange=(action "typeSelectionMade") as |parameter|}}
+    {{parameter.label}}
+  {{/power-select}}
+</td>
+<td>
+  {{#if (or hasPrecision hasScale)}}
+    <div class="row">
+      {{#if hasPrecision}}
+        <div class="{{if hasScale 'col-md-6' 'col-md-12'}}">
+          <div class="{{if column.precisionError 'has-error'}}">
+            {{input type="text" class="form-control" placeholder="Precision" value=column.precision disabled=(not column.editing)}}
+            {{#if column.precisionError}}
+              <span class="help-block">{{column.precisionError.error}}</span>
+            {{/if}}
+          </div>
+        </div>
+      {{/if}}
+      {{#if hasScale}}
+        <div class="{{if hasPrecision 'col-md-6' 'col-md-12'}}">
+          <div class="{{if column.scaleError 'has-error'}}">
+            {{input type="text" class="form-control" placeholder="Scale" value=column.scale disabled=(not column.editing)}}
+            {{#if column.scaleError}}
+              <span class="help-block">{{column.scaleError.error}}</span>
+            {{/if}}
+          </div>
+        </div>
+      {{/if}}
+    </div>
+  {{/if}}
+
+</td>
+<td>
+  <div class="text-center">
+    <a {{action "advanceOptionToggle"}}>{{fa-icon "cog" size="lg"}}</a>
+  </div>
+  {{#if advancedOption}}
+    {{#modal-dialog
+      close="advanceOptionToggle"
+      translucentOverlay=true
+      clickOutsideToClose=true
+      container-class="modal-dialog"}}
+      <div class="modal-content">
+        <div class="modal-header text-danger">
+          <button type="button" class="close" {{action "advanceOptionToggle"}}><span aria-hidden="true">&times;</span></button>
+          <h4 class="modal-title">{{fa-icon "cog" size="lg"}} Advanced Column Options</h4>
+        </div>
+        <div class="modal-body">
+          <div class="form-horizontal">
+            <div class="form-group">
+              <label class="col-sm-2 control-label">Comment</label>
+              <div class="col-sm-10">
+                {{input type="text" class="form-control" placeholder="Comment" value=column.comment disabled=(not column.editing)}}
+              </div>
+            </div>
+
+            <div class="form-group">
+              <div class="col-sm-offset-2 col-sm-10">
+                <div class="checkbox">
+                  <label>
+                    {{input type="checkbox" checked=column.isPartitioned disabled=(not column.editing)}} Partitioned
+                  </label>
+                </div>
+              </div>
+              <div class="col-sm-offset-2 col-sm-10">
+                <div class="checkbox">
+                  <label>
+                    {{input type="checkbox" checked=column.isClustered disabled=(not column.editing)}} Clustering
+                  </label>
+                </div>
+              </div>
+            </div>
+          </div>
+        </div>
+        <div class="modal-footer">
+          <button type="button" class="btn btn-success" {{action "advanceOptionToggle"}}>{{fa-icon "check"}} Done</button>
+        </div>
+      </div>
+    {{/modal-dialog}}
+  {{/if}}
+</td>
+<td>
+  <div class="text-center">
+    {{#unless column.editing}}
+      <button class="btn btn-success" {{action "edit"}}>{{fa-icon "check"}} Edit</button>
+    {{/unless}}
+    <button class="btn btn-danger" {{action "delete"}}>{{fa-icon "times"}} Delete</button>
+  </div>
+
+
+</td>

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/components/create-table.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/create-table.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/create-table.hbs
new file mode 100644
index 0000000..f369783
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/create-table.hbs
@@ -0,0 +1,63 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="row">
+  <div class="col-md-12 form-horizontal table-name-input">
+    <div class="form-group {{if hasTableNameError 'has-error'}}">
+      <label class="col-md-1 control-label">Name</label>
+      <div class="col-md-11">
+        {{input type="text" class="form-control" placeholder="Table name" value=tableName}}
+        {{#if hasTableNameError}}
+          <span class="help-block">Name cannot be empty</span>
+        {{/if}}
+      </div>
+    </div>
+  </div>
+</div>
+
+{{#tabs-pane tabs=tabs inverse=true as |tab|}}
+  {{tabs-item tab=tab tabs=tabs activate="activate"}}
+{{/tabs-pane}}
+
+<div class="create-table-inner">
+  <div class="row">
+    {{#each tabs as |tab|}}
+      {{#if tab.active}}
+        {{#if (eq tab.link "create.table.columns")}}
+          {{#if hasEmptyColumnsError}}
+            <div class="alert alert-danger create-table-error">
+              {{emptyColumnsErrorText}}
+            </div>
+          {{/if}}
+          {{table-columns columns=columns shouldAddBuckets=shouldAddBuckets}}
+        {{/if}}
+        {{#if (eq tab.link "create.table.properties")}}
+          {{table-properties properties=properties}}
+        {{/if}}
+        {{#if (eq tab.link "create.table.advanced")}}
+          {{table-advanced-settings settings=settings shouldAddBuckets=shouldAddBuckets errors=settingErrors}}
+        {{/if}}
+      {{/if}}
+    {{/each}}
+  </div>
+
+</div>
+<div class="create-table-controls">
+  <button class="btn btn-success" {{action "create"}}>{{fa-icon "plus"}} Create</button>
+  <button class="btn btn-warning" {{action "cancel"}}>{{fa-icon "times"}} Cancel</button>
+</div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/components/database-drop-confirm.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/database-drop-confirm.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/database-drop-confirm.hbs
new file mode 100644
index 0000000..efb54ac
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/database-drop-confirm.hbs
@@ -0,0 +1,37 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+{{#modal-dialog
+close="cancel"
+translucentOverlay=true
+clickOutsideToClose=true
+container-class="modal-dialog"}}
+  <div class="modal-content">
+    <div class="modal-header text-danger">
+      <button type="button" class="close" {{action "cancel"}}><span aria-hidden="true">&times;</span></button>
+      <h4 class="modal-title">{{fa-icon "exclamation-triangle" size="lg"}} Confirm</h4>
+    </div>
+    <div class="modal-body text-center">
+      <p>Are you sure you want to drop the <span class="text-uppercase"><strong>'{{name}}'</strong></span> database?</p>
+    </div>
+    <div class="modal-footer">
+      <button type="button" class="btn btn-default" {{action "cancel"}}>{{fa-icon "times"}} Cancel</button>
+      <button type="button" class="btn btn-primary" {{action "confirm"}}>{{fa-icon "check"}} Confirm</button>
+    </div>
+  </div><!-- /.modal-content -->
+{{/modal-dialog}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/components/database-not-empty.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/database-not-empty.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/database-not-empty.hbs
new file mode 100644
index 0000000..e11e898
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/database-not-empty.hbs
@@ -0,0 +1,38 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+{{#modal-dialog
+close="close"
+translucentOverlay=true
+clickOutsideToClose=true
+container-class="modal-dialog"}}
+  <div class="modal-content">
+    <div class="modal-header text-danger">
+      <button type="button" class="close" {{action "close"}}><span aria-hidden="true">&times;</span></button>
+      <h4 class="modal-title">{{fa-icon "exclamation-triangle" size="lg"}} Error</h4>
+    </div>
+    <div class="modal-body text-center">
+      <p>Cannot drop database. Database <span class="text-uppercase"><strong>'{{name}}'</strong></span> is not empty!</p>
+      <div class="clearfix">
+        <div class="col-md-offset-4 col-md-4">
+          <button type="button" class="btn btn-warning btn-block" {{action "close"}}>{{fa-icon "times"}} Ok</button>
+        </div>
+      </div>
+    </div>
+  </div><!-- /.modal-content -->
+{{/modal-dialog}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/components/database-search-bar.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/database-search-bar.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/database-search-bar.hbs
new file mode 100644
index 0000000..d4ac704
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/database-search-bar.hbs
@@ -0,0 +1,57 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="col-md-2">
+  <p class="text-left text-uppercase"><strong>{{heading}}</strong></p>
+  <p class="text-left text-muted"><small>{{subHeading}}</small></p>
+</div>
+<div class="col-md-9">
+  <div class="input-group input-group-lg">
+    {{#unless extendDrawer }}
+      {{input type="text" class="form-control display" value=selectedDatabase.name}}
+    {{else}}
+      {{input type="text" class="form-control search" value=filterText placeholder="Search Databases"}}
+    {{/unless}}
+
+    {{#if enableSecondaryAction}}
+      <div class="input-group-btn">
+        <button type="button" class="btn btn-default {{if extendDrawer 'active'}}" {{action 'secondaryActionClicked'}}>{{fa-icon secondaryActionFaIcon}} {{secondaryActionText}} <span class="caret"></span></button>
+      </div>
+    {{/if}}
+  </div>
+  {{#if extendDrawer }}
+    <div class="database-search-drawer">
+      <div class="list-group">
+        {{#each filteredDatabases as |database|}}
+            <a {{action "databaseClicked" database}} class="list-group-item {{if database.selected 'selected'}}">
+              {{#if database.selected}}
+                <div class="pull-right">
+                  {{fa-icon "check" size="lg"}}
+                </div>
+              {{/if}}
+
+              &nbsp;&nbsp;
+              {{fa-icon "database" size="lg"}}&nbsp;&nbsp;{{database.name}}
+            </a>
+        {{/each}}
+      </div>
+
+    </div>
+  {{/if}}
+</div>
+{{yield}}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/components/edit-setting-item.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/edit-setting-item.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/edit-setting-item.hbs
new file mode 100644
index 0000000..5138696
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/edit-setting-item.hbs
@@ -0,0 +1,42 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<td>
+  {{#power-select
+    selected=selectedParam
+    options=hiveParameters
+    search=(action "searchAction")
+    searchField="name"
+    searchPlaceholder="Type the parameter"
+    onchange=(action "selectionMade") as |parameter|}}
+    {{parameter.name}}
+  {{/power-select}}
+</td>
+<td>
+  <div class="{{if invalid 'has-error'}}">
+    {{input type="text" class="form-control" value=selectedValue}}
+    {{#if invalid}}
+      <span class="help-block">{{currentError}}</span>
+    {{/if}}
+
+  </div>
+</td>
+<td>
+  <button class="btn btn-success" {{action "update"}}>{{fa-icon "save"}} update</button>
+  <button class="btn btn-danger" {{action "cancel"}}>{{fa-icon "times"}} cancel</button>
+</td>

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/components/hdfs-viewer-modal.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/hdfs-viewer-modal.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/hdfs-viewer-modal.hbs
new file mode 100644
index 0000000..f3dce67
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/hdfs-viewer-modal.hbs
@@ -0,0 +1,50 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+{{#modal-dialog
+  close="closeDirectoryViewer"
+  translucentOverlay=true
+  clickOutsideToClose=false
+  container-class="modal-dialog"}}
+  <div class="modal-content hdfs-viewer">
+    <div class="modal-header">
+      <div class="text-info">
+        <button type="button" class="close" {{action "closeDirectoryViewer"}}><span aria-hidden="true">&times;</span></button>
+        <h4 class="modal-title">{{fa-icon "database" size="lg"}} Select location</h4>
+      </div>
+      {{#if (and showSelectedPath hdfsLocation)}}
+        <p><strong>Current path:</strong> {{hdfsLocation}}</p>
+      {{/if}}
+
+    </div>
+    <div class="modal-body">
+      <div class="hdfs-viewer-wrap">
+        {{directory-viewer
+          config=config
+          errorAction="viewerError"
+          pathSelectAction="viewerSelectedPath"
+        }}
+      </div>
+    </div>
+
+    <div class="modal-footer">
+      <button type="button" class="btn btn-default" {{action "closeDirectoryViewer"}}>{{fa-icon "times"}} Close</button>
+      <button type="button" class="btn btn-primary" {{action "pathSelected"}}>{{fa-icon "check"}} Select</button>
+    </div>
+  </div><!-- /.modal-content -->
+{{/modal-dialog}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/components/job-item.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/job-item.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/job-item.hbs
new file mode 100644
index 0000000..291def6
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/job-item.hbs
@@ -0,0 +1,33 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<tr>
+  <td>{{job.id}}</td>
+  <td>{{job.title}}</td>
+  <td>{{job.status}}</td>
+  <td>{{moment-from-now job.dateSubmitted}}</td>
+  <td>{{job.duration}}</td>
+  <td><a {{action "toggleExpandJob" job.id}}>{{fa-icon "expand"}}</a></td>
+</tr>
+
+{{#if expanded}}
+  <tr>
+    <td colspan="6">{{queryFile.fileContent}}</td>
+  </tr>
+{{/if}}
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/components/jobs-browser.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/jobs-browser.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/jobs-browser.hbs
new file mode 100644
index 0000000..84f16a7
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/jobs-browser.hbs
@@ -0,0 +1,62 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="row jobs-status">
+  <div class="col-md-12 ">
+    <div class="col-md-9">
+      <div class="strip">
+        <ul class="list-inline">
+          {{#each-in statusCounts as |status count|}}
+            <li>{{count}} <span class="text-uppercase">{{status}}</span></li>
+          {{/each-in}}
+        </ul>
+      </div>
+    </div>
+    <div class="col-md-3">
+      {{date-range-picker
+        start=startTime
+        end=endTime
+        opens="left"
+        applyAction=(action "setDateRange")
+      }}
+    </div>
+  </div>
+
+</div>
+
+<div class="row jobs-table">
+  <div class="col-md-12">
+    <table class="table table-striped">
+      <thead>
+        <tr>
+          <th width="10%">Job Id</th>
+          <th width="30%">Title <span class="pull-right">Dipayan</span> </th>
+          <th width="10%">status</th>
+          <th width="25%">Start time</th>
+          <th width="20%" >Duration</th>
+          <th>Action</th>
+        </tr>
+      </thead>
+      <tbody>
+        {{#each jobs as |job| }}
+          {{job-item job=job}}
+        {{/each}}
+      </tbody>
+    </table>
+  </div>
+</div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/components/list-filter.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/list-filter.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/list-filter.hbs
new file mode 100644
index 0000000..6ab731a
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/list-filter.hbs
@@ -0,0 +1,33 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="list-header clearfix">
+  <div>
+    <p class="text-uppercase"><strong>{{header}}&nbsp;&nbsp;&nbsp;|&nbsp;&nbsp;&nbsp;{{items.length}}</strong></p>
+  </div>
+</div>
+
+<div class="search-bar">
+  {{input value=filterText class="search-query" placeholder=placeholder}}
+  {{#if emptyFilterText}}
+    <a {{action "enableFilter"}}>{{fa-icon "search" class="fa-flip-horizontal" size="lg"}}</a>
+  {{else}}
+    <a {{action "disableFilter"}}>{{fa-icon "times" class="fa-flip-horizontal" size="lg"}}</a>
+  {{/if}}
+</div>
+{{yield filteredItems}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/components/list-group.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/list-group.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/list-group.hbs
new file mode 100644
index 0000000..bd88dbd
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/list-group.hbs
@@ -0,0 +1,22 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+{{#each items as |item|}}
+  {{yield item items}}
+{{/each}}
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/components/list-item.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/list-item.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/list-item.hbs
new file mode 100644
index 0000000..4fbd86b
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/list-item.hbs
@@ -0,0 +1,22 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+{{#if item.icon}}
+  {{fa-icon item.icon size="lg"}}&nbsp;&nbsp;
+{{/if}}
+{{item.name}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/components/multiple-database-search-bar.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/multiple-database-search-bar.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/multiple-database-search-bar.hbs
new file mode 100644
index 0000000..57001ed
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/multiple-database-search-bar.hbs
@@ -0,0 +1,45 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="col-md-2">
+  <p class="text-left text-uppercase"><strong>{{heading}}</strong></p>
+  <p class="text-left text-muted"><small>{{subHeading}}</small></p>
+</div>
+<div class="col-md-9">
+  <div class="input-group-lg">
+
+    {{#power-select-multiple
+    placeholder="Search databases"
+    options=allDbs
+    selected=selectedDbs
+    onchange=(pipe-action (action (mut selectedDbs)) (action "updateTables"))
+    onkeydown=(action "createOnEnter")
+    as |number|}}
+      {{fa-icon "database"}} {{number}}
+    {{/power-select-multiple}}
+
+
+    <span class="input-group-btn" style="top: 0;right: 130px;position: absolute;">
+      <button type="button" class="btn btn-default">{{fa-icon "folder"}} Browse <span class="caret"></span></button>
+    </span>
+
+
+  </div>
+</div>
+
+{{yield}}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/components/property-item.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/property-item.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/property-item.hbs
new file mode 100644
index 0000000..e0ba696
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/property-item.hbs
@@ -0,0 +1,45 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<td>
+  <div class="{{if property.keyError 'has-error'}}">
+    {{input type="text" class="form-control" value=property.key disabled=(not property.editing)}}
+    {{#if property.keyError}}
+      <span class="help-block">{{property.keyError.error}}</span>
+    {{/if}}
+  </div>
+
+</td>
+
+<td>
+  <div class="{{if property.valueError 'has-error'}}">
+    {{input type="text" class="form-control" value=property.value disabled=(not property.editing)}}
+    {{#if property.valueError}}
+      <span class="help-block">{{property.valueError.error}}</span>
+    {{/if}}
+  </div>
+</td>
+
+<td>
+  <div class="text-center">
+    {{#unless property.editing}}
+      <button class="btn btn-success" {{action "edit"}}>{{fa-icon "check"}} Edit</button>
+    {{/unless}}
+    <button class="btn btn-danger" {{action "delete"}}>{{fa-icon "times"}} Delete</button>
+  </div>
+</td>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/components/query-editor.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/query-editor.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/query-editor.hbs
new file mode 100644
index 0000000..d9b9018
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/query-editor.hbs
@@ -0,0 +1,21 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+{{textarea id="code-mirror" rows="15" cols="20" value=currentQuery}}
+
+{{yield}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/components/query-result-table.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/query-result-table.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/query-result-table.hbs
new file mode 100644
index 0000000..5001c99
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/query-result-table.hbs
@@ -0,0 +1,53 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+{{#if columns.length}}
+  <div class="clearfix" style="text-align: right">
+    {{#if showPreviousButton}}
+        <button class="btn btn-success" {{action "goPrevPage" }}>{{fa-icon "arrow-left"}} </button>
+    {{/if}}
+    <button class="btn btn-success" {{action "goNextPage" }}>{{fa-icon "arrow-right"}} </button>
+  </div>
+{{/if}}
+
+<div class="clearfix">
+{{#light-table table height='65vh' as |t|}}
+  {{#if columns.length}}
+      {{t.head
+      onColumnClick=(action 'onColumnClick')
+      iconAscending='fa fa-sort-asc'
+      iconDescending='fa fa-sort-desc'
+      fixed=true
+      }}
+
+      {{#t.body
+      canSelect=false
+      onScrolledToBottom=(action 'onScrolledToBottom')
+      as |body|
+      }}
+        {{#if isLoading}}
+          {{#body.loader}}
+            {{table-loader}}
+          {{/body.loader}}
+        {{/if}}
+      {{/t.body}}
+  {{/if}}
+{{/light-table}}
+</div>
+
+{{yield}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/components/setting-item.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/setting-item.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/setting-item.hbs
new file mode 100644
index 0000000..bc22b74
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/setting-item.hbs
@@ -0,0 +1,28 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<td>
+  {{setting.key}}
+</td>
+<td>
+  {{setting.value}}
+</td>
+<td>
+  <button class="btn btn-success" {{action "edit"}}>{{fa-icon "edit"}} Edit</button>
+  <button class="btn btn-danger" {{action "delete"}}>{{fa-icon "trash"}} Delete</button>
+</td>

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/resources/ui/app/templates/components/setting-list.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/setting-list.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/setting-list.hbs
new file mode 100644
index 0000000..c3f9c76
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/setting-list.hbs
@@ -0,0 +1,37 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<table class="table table-bordered table-hover">
+  <thead>
+  <tr>
+    <th width="40%">KEY</th>
+    <th width="40%">VALUE</th>
+    <th width="20%">ACTIONS</th>
+  </tr>
+  </thead>
+  <tbody>
+  {{#each settings as |setting|}}
+    {{yield setting}}
+  {{/each}}
+  <tr class="new-settings text-center">
+    <td colspan="3">
+      <a {{action "addNewSettings"}}>{{fa-icon "plus"}} Add New</a>
+    </td>
+  </tr>
+  </tbody>
+</table>


[18/20] ambari git commit: AMBARI-19321 : Hive View 2.0 - Minimal view for Hive which includes new UI changes. Also made changes in poms as required (nitirajrathore)

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/StatementExecutor.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/StatementExecutor.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/StatementExecutor.java
new file mode 100644
index 0000000..03332d9
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/StatementExecutor.java
@@ -0,0 +1,150 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor;
+
+import akka.actor.ActorRef;
+import akka.actor.Props;
+import com.google.common.base.Optional;
+import org.apache.ambari.view.hive20.ConnectionDelegate;
+import org.apache.ambari.view.hive20.actor.message.GetColumnMetadataJob;
+import org.apache.ambari.view.hive20.actor.message.HiveMessage;
+import org.apache.ambari.view.hive20.actor.message.ResultInformation;
+import org.apache.ambari.view.hive20.actor.message.RunStatement;
+import org.apache.ambari.view.hive20.actor.message.StartLogAggregation;
+import org.apache.ambari.view.hive20.actor.message.job.Failure;
+import org.apache.ambari.view.hive20.actor.message.job.UpdateYarnAtsGuid;
+import org.apache.ambari.view.hive20.persistence.Storage;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.hive.jdbc.HiveConnection;
+import org.apache.hive.jdbc.HiveStatement;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.UUID;
+
+/**
+ * Executes a single statement and returns the ResultSet if the statement generates a ResultSet.
+ * Also starts logAggregation and YarnAtsGuidFetcher if they are required.
+ */
+public class StatementExecutor extends HiveActor {
+
+  private final Logger LOG = LoggerFactory.getLogger(getClass());
+
+  private final HdfsApi hdfsApi;
+  private final HiveConnection connection;
+  protected final Storage storage;
+  private final ConnectionDelegate connectionDelegate;
+  private ActorRef logAggregator;
+  private ActorRef guidFetcher;
+
+
+  public StatementExecutor(HdfsApi hdfsApi, Storage storage, HiveConnection connection, ConnectionDelegate connectionDelegate) {
+    this.hdfsApi = hdfsApi;
+    this.storage = storage;
+    this.connection = connection;
+    this.connectionDelegate = connectionDelegate;
+  }
+
+  @Override
+  public void handleMessage(HiveMessage hiveMessage) {
+    Object message = hiveMessage.getMessage();
+    if (message instanceof RunStatement) {
+      runStatement((RunStatement) message);
+    } else if (message instanceof GetColumnMetadataJob) {
+      getColumnMetaData((GetColumnMetadataJob) message);
+    }
+  }
+
+  private void runStatement(RunStatement message) {
+    try {
+      HiveStatement statement = connectionDelegate.createStatement(connection);
+      if (message.shouldStartLogAggregation()) {
+        startLogAggregation(statement, message.getStatement(), message.getLogFile().get());
+      }
+
+      if (message.shouldStartGUIDFetch() && message.getJobId().isPresent()) {
+        startGUIDFetch(message.getId(), statement, message.getJobId().get());
+      }
+      LOG.info("Statement executor is executing statement: {}, Statement id: {}, JobId: {}", message.getStatement(), message.getId(), message.getJobId().or("SYNC JOB"));
+      Optional<ResultSet> resultSetOptional = connectionDelegate.execute(message.getStatement());
+      LOG.info("Finished executing statement: {}, Statement id: {}, JobId: {}", message.getStatement(), message.getId(), message.getJobId().or("SYNC JOB"));
+
+      if (resultSetOptional.isPresent()) {
+        sender().tell(new ResultInformation(message.getId(), resultSetOptional.get()), self());
+      } else {
+        sender().tell(new ResultInformation(message.getId()), self());
+      }
+    } catch (SQLException e) {
+      LOG.error("Failed to execute statement: {}. {}", message.getStatement(), e);
+      sender().tell(new ResultInformation(message.getId(), new Failure("Failed to execute statement: " + message.getStatement(), e)), self());
+    } finally {
+      stopLogAggregation();
+      stopGUIDFetch();
+    }
+  }
+
+  private void startGUIDFetch(int statementId, HiveStatement statement, String jobId) {
+    if (guidFetcher == null) {
+      guidFetcher = getContext().actorOf(Props.create(YarnAtsGUIDFetcher.class, sender())
+        .withDispatcher("akka.actor.misc-dispatcher"), "YarnAtsGUIDFetcher:" + UUID.randomUUID().toString());
+    }
+    LOG.info("Fetching guid for Job Id: {}", jobId);
+    guidFetcher.tell(new UpdateYarnAtsGuid(statementId, statement, jobId), self());
+  }
+
+  private void stopGUIDFetch() {
+    if (guidFetcher != null) {
+      getContext().stop(guidFetcher);
+    }
+    guidFetcher = null;
+  }
+
+  private void startLogAggregation(HiveStatement statement, String sqlStatement, String logFile) {
+    if (logAggregator == null) {
+      logAggregator = getContext().actorOf(
+        Props.create(LogAggregator.class, hdfsApi, statement, logFile)
+          .withDispatcher("akka.actor.misc-dispatcher"), "LogAggregator:" + UUID.randomUUID().toString());
+    }
+    LOG.info("Fetching query logs for statement: {}", sqlStatement);
+    logAggregator.tell(new StartLogAggregation(sqlStatement), getSelf());
+  }
+
+  private void stopLogAggregation() {
+    if (logAggregator != null) {
+      getContext().stop(logAggregator);
+    }
+    logAggregator = null;
+  }
+
+
+  private void getColumnMetaData(GetColumnMetadataJob message) {
+    try {
+      ResultSet resultSet = connectionDelegate.getColumnMetadata(connection, message);
+      sender().tell(new ResultInformation(-1, resultSet), self());
+    } catch (SQLException e) {
+      LOG.error("Failed to get column metadata for databasePattern: {}, tablePattern: {}, ColumnPattern {}. {}",
+        message.getSchemaPattern(), message.getTablePattern(), message.getColumnPattern(), e);
+      sender().tell(new ResultInformation(-1,
+        new Failure("Failed to get column metadata for databasePattern: " + message.getSchemaPattern() +
+          ", tablePattern: " + message.getTablePattern() + ", ColumnPattern: " + message.getColumnPattern(), e)), self());
+    }
+  }
+}
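
A quick usage sketch (not part of this patch): a supervising actor would create this executor with Props mirroring the constructor above and then tell it a RunStatement message; the reply comes back to sender() as a ResultInformation. The caller context and the runStatementMessage variable below are placeholders.

  // Hypothetical caller inside another actor of this view (sketch only)
  ActorRef executor = getContext().actorOf(
      Props.create(StatementExecutor.class, hdfsApi, storage, connection, connectionDelegate),
      "StatementExecutor:" + UUID.randomUUID().toString());
  executor.tell(runStatementMessage, self());   // executor answers with a ResultInformation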

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/TableChangeNotifier.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/TableChangeNotifier.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/TableChangeNotifier.java
new file mode 100644
index 0000000..0581618
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/TableChangeNotifier.java
@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor;
+
+import akka.actor.Props;
+import org.apache.ambari.view.hive20.actor.message.HiveMessage;
+import org.apache.ambari.view.hive20.internal.dto.TableInfo;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Receives notifications about table changes (added, updated, removed) and logs them.
+ */
+public class TableChangeNotifier extends HiveActor {
+  private final Logger LOG = LoggerFactory.getLogger(getClass());
+
+  @Override
+  public void handleMessage(HiveMessage hiveMessage) {
+    Object message = hiveMessage.getMessage();
+    if(message instanceof TableUpdated) {
+      handleTableUpdated((TableUpdated) message);
+    } else if(message instanceof TableAdded) {
+      handleTableAdded((TableAdded) message);
+    } else if(message instanceof TableRemoved) {
+      handleTableRemoved((TableRemoved) message);
+    }
+  }
+
+  private void handleTableUpdated(TableUpdated message) {
+    LOG.info("Tables updated for table name: {}", message.getTableInfo().getName());
+  }
+
+  private void handleTableAdded(TableAdded message) {
+    LOG.info("Tables added for table name: {}", message.getTableInfo().getName());
+  }
+
+  private void handleTableRemoved(TableRemoved message) {
+    LOG.info("Tables removed for table name: {}", message.getTableName());
+  }
+
+  public static Props props() {
+    return Props.create(TableChangeNotifier.class);
+  }
+
+
+  public static class TableAdded {
+    private final TableInfo tableInfo;
+    public TableAdded(TableInfo tableInfo) {
+      this.tableInfo = tableInfo;
+    }
+
+    public TableInfo getTableInfo() {
+      return tableInfo;
+    }
+  }
+
+  public static class TableRemoved {
+    private final String tableName;
+    public TableRemoved(String tableName) {
+      this.tableName = tableName;
+    }
+
+    public String getTableName() {
+      return tableName;
+    }
+  }
+
+
+  public static class TableUpdated {
+    private final TableInfo tableInfo;
+    public TableUpdated(TableInfo tableInfo) {
+      this.tableInfo = tableInfo;
+    }
+
+    public TableInfo getTableInfo() {
+      return tableInfo;
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/YarnAtsGUIDFetcher.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/YarnAtsGUIDFetcher.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/YarnAtsGUIDFetcher.java
new file mode 100644
index 0000000..e98864e
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/YarnAtsGUIDFetcher.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor;
+
+import akka.actor.ActorRef;
+import org.apache.ambari.view.hive20.actor.message.HiveMessage;
+import org.apache.ambari.view.hive20.actor.message.job.SaveGuidToDB;
+import org.apache.ambari.view.hive20.actor.message.job.UpdateYarnAtsGuid;
+import org.apache.hive.jdbc.HiveStatement;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import scala.concurrent.duration.Duration;
+
+import java.util.concurrent.TimeUnit;
+
+
+/**
+ * Periodically queries the HiveStatement for the YARN/ATS GUID of an executing job and saves it to the database
+ */
+public class YarnAtsGUIDFetcher extends HiveActor {
+
+  private final Logger LOG = LoggerFactory.getLogger(getClass());
+
+  private final ActorRef jdbcConnectorActor;
+
+  public YarnAtsGUIDFetcher(ActorRef jdbcConnectorActor) {
+    this.jdbcConnectorActor = jdbcConnectorActor;
+  }
+
+  @Override
+  public void handleMessage(HiveMessage hiveMessage) {
+    Object message = hiveMessage.getMessage();
+    if(message instanceof UpdateYarnAtsGuid) {
+      updateGuid((UpdateYarnAtsGuid) message);
+    }
+  }
+
+  private void updateGuid(UpdateYarnAtsGuid message) {
+    HiveStatement statement = message.getStatement();
+    String jobId = message.getJobId();
+    String yarnAtsGuid = statement.getYarnATSGuid();
+
+    LOG.info("Fetched guid: {}, for job id: {}", yarnAtsGuid, jobId);
+
+    // If ATS GUID is not yet generated, we will retry after 1 second
+    if(yarnAtsGuid == null) {
+      LOG.info("Retrying to fetch guid");
+      getContext().system().scheduler()
+        .scheduleOnce(Duration.create(1, TimeUnit.SECONDS), getSelf(), message, getContext().dispatcher(), null);
+    } else {
+      jdbcConnectorActor.tell(new SaveGuidToDB(message.getStatementId(), yarnAtsGuid, jobId), self());
+      LOG.info("Message send to save GUID for Statement Id: {}, Job id: {}, Guid: {}", message.getStatementId(), message.getJobId(), yarnAtsGuid);
+    }
+  }
+}
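
The retry logic above can be read as a small, self-contained pattern: when the value is not yet available, the actor re-schedules the same message to itself after one second instead of blocking. A minimal sketch of that pattern follows; it uses a plain Akka UntypedActor, and RetryingActor, PollGuid and lookupGuid are hypothetical names used only for illustration, not part of this patch.

import akka.actor.UntypedActor;
import scala.concurrent.duration.Duration;

import java.util.concurrent.TimeUnit;

public class RetryingActor extends UntypedActor {

  public static class PollGuid {
    final String jobId;
    public PollGuid(String jobId) { this.jobId = jobId; }
  }

  @Override
  public void onReceive(Object message) {
    if (message instanceof PollGuid) {
      // Stand-in for statement.getYarnATSGuid(); may legitimately return null early on.
      String guid = lookupGuid(((PollGuid) message).jobId);
      if (guid == null) {
        // Not generated yet: retry the same message after 1 second, as YarnAtsGUIDFetcher does.
        getContext().system().scheduler().scheduleOnce(
            Duration.create(1, TimeUnit.SECONDS), getSelf(), message,
            getContext().dispatcher(), null);
      } else {
        getSender().tell(guid, getSelf());
      }
    } else {
      unhandled(message);
    }
  }

  private String lookupGuid(String jobId) {
    return null; // placeholder for a real lookup against the running HiveStatement
  }
}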

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/Connect.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/Connect.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/Connect.java
new file mode 100644
index 0000000..a7804b9
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/Connect.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message;
+
+import com.google.common.base.Optional;
+import org.apache.ambari.view.hive20.AuthParams;
+import org.apache.ambari.view.hive20.internal.Connectable;
+import org.apache.ambari.view.hive20.internal.HiveConnectionWrapper;
+
+/**
+ * Connect message to be sent to the Connection Actor with the connection parameters
+ */
+public class Connect {
+
+  private final HiveJob.Type type;
+  private final String jobId;
+  private final String username;
+  private final String password;
+  private final String jdbcUrl;
+
+
+  private Connect(HiveJob.Type type, String jobId, String username, String password, String jdbcUrl) {
+    this.type = type;
+    this.jobId = jobId;
+    this.username = username;
+    this.password = password;
+    this.jdbcUrl = jdbcUrl;
+  }
+
+  public Connect(String jobId, String username, String password, String jdbcUrl) {
+    this(HiveJob.Type.ASYNC, jobId, username, password, jdbcUrl);
+  }
+
+  public Connect(String username, String password, String jdbcUrl) {
+    this(HiveJob.Type.SYNC, null, username, password, jdbcUrl);
+  }
+
+  public Connectable getConnectable(AuthParams authParams){
+    return new HiveConnectionWrapper(getJdbcUrl(),username,password, authParams);
+  }
+
+  public String getUsername() {
+    return username;
+  }
+
+  public String getPassword() {
+    return password;
+  }
+
+  public String getJdbcUrl() {
+    return jdbcUrl;
+  }
+
+  public HiveJob.Type getType() {
+    return type;
+  }
+
+  public Optional<String> getJobId() {
+    return Optional.fromNullable(jobId);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/CursorReset.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/CursorReset.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/CursorReset.java
new file mode 100644
index 0000000..0cf0e30
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/CursorReset.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message;
+
+public class CursorReset {
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/ExecuteJob.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/ExecuteJob.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/ExecuteJob.java
new file mode 100644
index 0000000..1955a19
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/ExecuteJob.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message;
+
+public class ExecuteJob {
+  public final static String SYNC_JOB_MARKER = "SYNC";
+  private final Connect connect;
+  private final HiveJob job;
+
+  public ExecuteJob(Connect connect, HiveJob job) {
+    this.connect = connect;
+    this.job = job;
+  }
+
+  public Connect getConnect() {
+    return connect;
+  }
+
+  public HiveJob getJob() {
+    return job;
+  }
+}
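
Taken together, Connect carries the JDBC connection details and HiveJob describes what to run; ExecuteJob pairs the two so both can be handed to the controlling actor in a single message. A minimal usage sketch follows, assuming the classes from this patch are on the classpath; the actor wiring, credentials, JDBC URL, job id, log file path and query text are illustrative only.

import akka.actor.ActorRef;
import org.apache.ambari.view.hive20.actor.message.Connect;
import org.apache.ambari.view.hive20.actor.message.ExecuteJob;
import org.apache.ambari.view.hive20.actor.message.HiveJob;
import org.apache.ambari.view.hive20.actor.message.SQLStatementJob;

public class ExecuteJobExample {

  // controllerActor is assumed to be an actor that accepts ExecuteJob messages.
  public static void submit(ActorRef controllerActor) {
    Connect connect = new Connect("job-42", "admin", "secret",
        "jdbc:hive2://localhost:10000/default");

    HiveJob job = new SQLStatementJob(HiveJob.Type.ASYNC,
        new String[]{"SELECT COUNT(*) FROM web_logs"},
        "admin", "job-42", "/user/admin/hive/jobs/job-42/logs");

    // Hand both to the controlling actor in a single message.
    controllerActor.tell(new ExecuteJob(connect, job), ActorRef.noSender());
  }
}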

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/ExecuteQuery.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/ExecuteQuery.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/ExecuteQuery.java
new file mode 100644
index 0000000..e44c35c
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/ExecuteQuery.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message;
+
+public class ExecuteQuery {
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/FetchError.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/FetchError.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/FetchError.java
new file mode 100644
index 0000000..d23c53d
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/FetchError.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message;
+
+/**
+ *
+ * Message to fetch the error details of a job for a given user
+ *
+ */
+public class FetchError {
+    private final String jobId;
+    private final String username;
+
+    public FetchError(String jobId, String username) {
+        this.jobId = jobId;
+        this.username = username;
+    }
+
+    public String getJobId() {
+        return jobId;
+    }
+
+    public String getUsername() {
+        return username;
+    }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/FetchResult.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/FetchResult.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/FetchResult.java
new file mode 100644
index 0000000..f642fc7
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/FetchResult.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message;
+
+/**
+ *
+ * Message to fetch the result of a job for a given user
+ *
+ */
+public class FetchResult {
+    private final String jobId;
+    private final String username;
+
+    public FetchResult(String jobId, String username) {
+        this.jobId = jobId;
+        this.username = username;
+    }
+
+    public String getJobId() {
+        return jobId;
+    }
+
+    public String getUsername() {
+        return username;
+    }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/GetColumnMetadataJob.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/GetColumnMetadataJob.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/GetColumnMetadataJob.java
new file mode 100644
index 0000000..a6ced40
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/GetColumnMetadataJob.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message;
+
+public class GetColumnMetadataJob extends HiveJob {
+  private final String schemaPattern;
+  private final String tablePattern;
+  private final String columnPattern;
+
+  public GetColumnMetadataJob(String username,
+                              String schemaPattern, String tablePattern, String columnPattern) {
+    super(Type.SYNC, username);
+    this.schemaPattern = schemaPattern;
+    this.tablePattern = tablePattern;
+    this.columnPattern = columnPattern;
+  }
+
+  public GetColumnMetadataJob(String username,
+                              String tablePattern, String columnPattern) {
+    this(username, "*", tablePattern, columnPattern);
+  }
+
+  public GetColumnMetadataJob(String username,
+                              String columnPattern) {
+    this(username, "*", "*", columnPattern);
+  }
+
+  public GetColumnMetadataJob(String username) {
+    this(username, "*", "*", "*");
+  }
+
+  public String getSchemaPattern() {
+    return schemaPattern;
+  }
+
+  public String getTablePattern() {
+    return tablePattern;
+  }
+
+  public String getColumnPattern() {
+    return columnPattern;
+  }
+}
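
The overloaded constructors simply default any omitted pattern to the "*" wildcard. A small illustrative sketch, with made-up user, table and column names:

import org.apache.ambari.view.hive20.actor.message.GetColumnMetadataJob;

public class ColumnMetadataExample {
  public static void main(String[] args) {
    // Explicit schema, table and column patterns.
    GetColumnMetadataJob exact = new GetColumnMetadataJob("admin", "default", "web_logs", "ip");

    // Schema defaults to "*": match the column in any schema.
    GetColumnMetadataJob anySchema = new GetColumnMetadataJob("admin", "web_logs", "ip");

    // Schema and table default to "*": match the column anywhere.
    GetColumnMetadataJob anyColumn = new GetColumnMetadataJob("admin", "ip");

    System.out.println(anySchema.getSchemaPattern()); // prints *
    System.out.println(anyColumn.getTablePattern());  // prints *
  }
}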

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/GetMoreLogs.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/GetMoreLogs.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/GetMoreLogs.java
new file mode 100644
index 0000000..5d9ae3a
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/GetMoreLogs.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message;
+
+
+public class GetMoreLogs {}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/HiveJob.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/HiveJob.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/HiveJob.java
new file mode 100644
index 0000000..b5538b1
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/HiveJob.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message;
+
+public abstract class HiveJob {
+
+  private final String username;
+  private final Type type;
+
+  public HiveJob(Type type, String username) {
+    this.type = type;
+    this.username = username;
+  }
+
+  public String getUsername() {
+    return username;
+  }
+
+
+
+
+  public Type getType() {
+    return type;
+  }
+
+
+  public enum Type {
+    SYNC,
+    ASYNC
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/HiveMessage.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/HiveMessage.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/HiveMessage.java
new file mode 100644
index 0000000..87acacd
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/HiveMessage.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message;
+
+
+import java.util.UUID;
+
+/**
+ * Message wrapper; each wrapped message is assigned a unique ID
+ */
+public class HiveMessage {
+
+    private String id = UUID.randomUUID().toString();
+
+    private Object message;
+
+    public HiveMessage(Object message) {
+        this.message = message;
+    }
+
+
+    public Object getMessage() {
+        return message;
+    }
+
+    public String getId() {
+        return id;
+    }
+
+    @Override
+    public String toString() {
+        return "HiveMessage{" +
+                "message=" + message +
+                ", id='" + id + '\'' +
+                '}';
+    }
+}
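
Actors in this patch receive a HiveMessage, unwrap it with getMessage() and dispatch on the payload type with instanceof, as TableChangeNotifier does above. A minimal sketch of that flow outside an actor; the Payload class is hypothetical:

import org.apache.ambari.view.hive20.actor.message.HiveMessage;

public class HiveMessageExample {

  static class Payload {
    final String text;
    Payload(String text) { this.text = text; }
  }

  public static void main(String[] args) {
    HiveMessage wrapped = new HiveMessage(new Payload("refresh"));

    // Every wrapped message carries its own UUID, handy for correlating log lines.
    System.out.println("id = " + wrapped.getId());

    Object message = wrapped.getMessage();
    if (message instanceof Payload) {
      System.out.println("payload = " + ((Payload) message).text);
    }
  }
}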

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/JobRejected.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/JobRejected.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/JobRejected.java
new file mode 100644
index 0000000..7dda74f
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/JobRejected.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message;
+
+public class JobRejected {
+
+  private final String username;
+  private final String jobId;
+  private final String message;
+
+  public JobRejected(String username, String jobId, String message) {
+    this.username = username;
+    this.jobId = jobId;
+    this.message = message;
+  }
+
+  public String getUsername() {
+    return username;
+  }
+
+  public String getJobId() {
+    return jobId;
+  }
+
+  public String getMessage() {
+    return message;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/JobSubmitted.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/JobSubmitted.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/JobSubmitted.java
new file mode 100644
index 0000000..b248325
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/JobSubmitted.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message;
+
+public class JobSubmitted {
+
+  private final String username;
+  private final String jobId;
+
+  public JobSubmitted(String username, String jobId) {
+    this.username = username;
+    this.jobId = jobId;
+  }
+
+  public String getUsername() {
+    return username;
+  }
+
+  public String getJobId() {
+    return jobId;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/LogAggregationFinished.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/LogAggregationFinished.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/LogAggregationFinished.java
new file mode 100644
index 0000000..d375833
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/LogAggregationFinished.java
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message;
+
+public class LogAggregationFinished {}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/Ping.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/Ping.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/Ping.java
new file mode 100644
index 0000000..c8449dc
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/Ping.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message;
+
+/**
+ * Ping message
+ */
+public class Ping {
+  private final String username;
+  private final String instanceName;
+
+  public Ping(String username, String instanceName) {
+    this.username = username;
+    this.instanceName = instanceName;
+  }
+
+  public String getUsername() {
+    return username;
+  }
+
+  public String getInstanceName() {
+    return instanceName;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/RegisterActor.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/RegisterActor.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/RegisterActor.java
new file mode 100644
index 0000000..9bd071a
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/RegisterActor.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message;
+
+import akka.actor.ActorRef;
+
+public class RegisterActor {
+
+    private ActorRef actorRef;
+
+    public RegisterActor(ActorRef actorRef) {
+        this.actorRef = actorRef;
+    }
+
+    public ActorRef getActorRef() {
+        return actorRef;
+    }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/ResetCursor.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/ResetCursor.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/ResetCursor.java
new file mode 100644
index 0000000..3b9a323
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/ResetCursor.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message;
+
+public class ResetCursor {
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/ResultInformation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/ResultInformation.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/ResultInformation.java
new file mode 100644
index 0000000..5b5e17c
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/ResultInformation.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message;
+
+import com.google.common.base.Optional;
+import org.apache.ambari.view.hive20.actor.message.job.Failure;
+
+import java.sql.ResultSet;
+
+/**
+ * Message signalling that an execution has completed.
+ * It carries a ResultSet if the execution produced one, or a Failure if it did not succeed.
+ */
+public class ResultInformation {
+  /**
+   * Execution id used to correlate this result with the originating request
+   */
+  private final int id;
+
+  /**
+   * If the execution returns a ResultSet then this will refer to the ResultSet
+   */
+  private final ResultSet resultSet;
+
+  private final Failure failure;
+
+  private final boolean cancelled;
+
+  private ResultInformation(int id, ResultSet resultSet, Failure failure, boolean cancelled) {
+    this.id = id;
+    this.resultSet = resultSet;
+    this.failure = failure;
+    this.cancelled = cancelled;
+  }
+
+  public ResultInformation(int id, ResultSet resultSet) {
+    this(id, resultSet, null, false);
+  }
+
+  public ResultInformation(int id) {
+    this(id, null, null, false);
+  }
+
+  public ResultInformation(int id, Failure failure) {
+    this(id, null, failure, false);
+  }
+
+  public ResultInformation(int id, boolean cancelled) {
+    this(id, null, null, cancelled);
+  }
+
+  public int getId() {
+    return id;
+  }
+
+  public Optional<ResultSet> getResultSet() {
+    return Optional.fromNullable(resultSet);
+  }
+
+  public Optional<Failure> getFailure() {
+    return Optional.fromNullable(failure);
+  }
+
+  public boolean isCancelled() {
+    return cancelled;
+  }
+}
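
One reasonable way for a receiver to consume a ResultInformation is to check the cancelled flag, the Failure and the ResultSet in turn, since only one of them is meaningful for a given message. A sketch of that handling, with the printing purely illustrative:

import com.google.common.base.Optional;
import org.apache.ambari.view.hive20.actor.message.ResultInformation;
import org.apache.ambari.view.hive20.actor.message.job.Failure;

import java.sql.ResultSet;

public class ResultInformationExample {

  public static void handle(ResultInformation info) {
    Optional<ResultSet> resultSet = info.getResultSet();
    Optional<Failure> failure = info.getFailure();

    if (info.isCancelled()) {
      System.out.println("Execution " + info.getId() + " was cancelled");
    } else if (failure.isPresent()) {
      System.out.println("Execution " + info.getId() + " failed: " + failure.get().getMessage());
    } else if (resultSet.isPresent()) {
      System.out.println("Execution " + info.getId() + " returned a ResultSet");
    } else {
      System.out.println("Execution " + info.getId() + " completed without a ResultSet");
    }
  }
}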

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/ResultNotReady.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/ResultNotReady.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/ResultNotReady.java
new file mode 100644
index 0000000..0c8ddb5
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/ResultNotReady.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message;
+
+/**
+ * For an async execution, this indicates that the job has not yet completed
+ * and its result is not ready to be returned
+ */
+public class ResultNotReady {
+  private final String jobId;
+  private final String username;
+  public ResultNotReady(String jobId, String username) {
+    this.jobId = jobId;
+    this.username = username;
+  }
+
+  public String getJobId() {
+    return jobId;
+  }
+
+  public String getUsername() {
+    return username;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/ResultReady.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/ResultReady.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/ResultReady.java
new file mode 100644
index 0000000..ac1c3e1
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/ResultReady.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message;
+
+import akka.actor.ActorRef;
+import com.google.common.base.Optional;
+
+/**
+ * Indicates that the result of a job is ready, optionally carrying a reference to the actor that holds it
+ */
+public class ResultReady extends FetchResult {
+  private final ActorRef result;
+
+
+  public ResultReady(String jobId, String username, ActorRef result) {
+    super(jobId, username);
+    this.result = result;
+  }
+
+  public ResultReady(String jobId, String username) {
+    this(jobId, username, null);
+  }
+
+  public Optional<ActorRef> getResult() {
+    return Optional.fromNullable(result);
+  }
+}
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/RunStatement.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/RunStatement.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/RunStatement.java
new file mode 100644
index 0000000..69dd4a8
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/RunStatement.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message;
+
+import com.google.common.base.Optional;
+
+/**
+ * Message sent by JdbcConnector to StatementExecutor to run a statement
+ */
+public class RunStatement {
+  /**
+   * This is the execution id meant to identify the executing statement sequence
+   */
+  private final int id;
+  private final String statement;
+  private final String logFile;
+  private final String jobId;
+  private final boolean startLogAggregation;
+  private final boolean startGUIDFetch;
+
+  public RunStatement(int id, String statement, String jobId, boolean startLogAggregation, String logFile, boolean startGUIDFetch) {
+    this.id = id;
+    this.statement = statement;
+    this.jobId = jobId;
+    this.logFile = logFile;
+    this.startLogAggregation = startLogAggregation;
+    this.startGUIDFetch = startGUIDFetch;
+  }
+
+  public RunStatement(int id, String statement) {
+    this(id, statement, null, false, null, false);
+  }
+
+  public int getId() {
+    return id;
+  }
+
+  public String getStatement() {
+    return statement;
+  }
+
+  public Optional<String> getLogFile() {
+    return Optional.fromNullable(logFile);
+  }
+
+  public boolean shouldStartLogAggregation() {
+    return startLogAggregation;
+  }
+
+  public boolean shouldStartGUIDFetch() {
+    return startGUIDFetch;
+  }
+
+  public Optional<String> getJobId() {
+    return Optional.fromNullable(jobId);
+  }
+}
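
The two constructors cover the two uses: the short form is a plain synchronous statement with no job bookkeeping, while the full form carries the job id, the log file and the log-aggregation/GUID-fetch flags. A short illustrative sketch (ids and paths are made up):

import org.apache.ambari.view.hive20.actor.message.RunStatement;

public class RunStatementExample {
  public static void main(String[] args) {
    // Synchronous statement: no job id, no log file, no aggregation.
    RunStatement sync = new RunStatement(1, "use default");
    System.out.println(sync.getJobId().isPresent()); // false

    // Statement tied to an async job: aggregate logs and fetch the ATS GUID.
    RunStatement async = new RunStatement(2, "SELECT COUNT(*) FROM web_logs",
        "job-42", true, "/user/admin/hive/jobs/job-42/logs", true);
    System.out.println(async.shouldStartLogAggregation()); // true
    System.out.println(async.getLogFile().get());          // prints the log file path
  }
}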

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/SQLStatementJob.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/SQLStatementJob.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/SQLStatementJob.java
new file mode 100644
index 0000000..22633b1
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/SQLStatementJob.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message;
+
+import com.google.common.base.Optional;
+import org.apache.commons.lang.StringUtils;
+
+import java.util.Arrays;
+import java.util.Collection;
+
+
+public class SQLStatementJob extends HiveJob {
+
+  public static final String SEMICOLON = ";";
+  private String[] statements;
+
+  private final String jobId;
+  private final String logFile;
+
+  public SQLStatementJob(Type type, String[] statements, String username, String jobId, String logFile) {
+    super(type, username);
+    this.statements = new String[statements.length];
+    this.jobId = jobId;
+    this.logFile = logFile;
+    for (int i = 0; i < statements.length; i++) {
+      this.statements[i] = clean(statements[i]);
+    }
+  }
+  public SQLStatementJob(Type type, String[] statements, String username) {
+    this(type, statements, username, null, null);
+  }
+
+  private String clean(String statement) {
+    return StringUtils.trim(statement);
+  }
+
+  public Collection<String> getStatements() {
+    return Arrays.asList(statements);
+  }
+
+  public Optional<String> getJobId() {
+    return Optional.fromNullable(jobId);
+  }
+
+  public Optional<String> getLogFile() {
+    return Optional.fromNullable(logFile);
+  }
+}
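
Statements are trimmed on construction and exposed as a collection, and the job id and log file stay absent when the shorter constructor is used. A small sketch with illustrative statements:

import org.apache.ambari.view.hive20.actor.message.HiveJob;
import org.apache.ambari.view.hive20.actor.message.SQLStatementJob;

public class SQLStatementJobExample {
  public static void main(String[] args) {
    SQLStatementJob job = new SQLStatementJob(HiveJob.Type.SYNC,
        new String[]{"  use default  ", "  show tables  "}, "admin");

    for (String statement : job.getStatements()) {
      System.out.println("[" + statement + "]"); // whitespace already trimmed
    }

    System.out.println(job.getJobId().isPresent());   // false for the short constructor
    System.out.println(job.getLogFile().isPresent()); // false
  }
}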

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/StartLogAggregation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/StartLogAggregation.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/StartLogAggregation.java
new file mode 100644
index 0000000..922ad1d
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/StartLogAggregation.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message;
+
+public class StartLogAggregation {
+  private String statement;
+
+  public StartLogAggregation() {
+  }
+
+  public StartLogAggregation(String statement) {
+    this.statement = statement;
+  }
+
+  public String getStatement() {
+    return statement;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/AsyncExecutionFailed.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/AsyncExecutionFailed.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/AsyncExecutionFailed.java
new file mode 100644
index 0000000..968167e
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/AsyncExecutionFailed.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message.job;
+
+public class AsyncExecutionFailed extends ExecutionFailed {
+  private final String jobId;
+  private final String username;
+
+  public AsyncExecutionFailed(String jobId,String username, String message, Throwable error) {
+    super(message, error);
+    this.jobId = jobId;
+    this.username = username;
+  }
+
+  public AsyncExecutionFailed(String jobId,String username, String message) {
+    super(message);
+    this.jobId = jobId;
+    this.username = username;
+  }
+
+
+
+  public String getJobId() {
+    return jobId;
+  }
+
+  public String getUsername() {
+    return username;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/CancelJob.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/CancelJob.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/CancelJob.java
new file mode 100644
index 0000000..694fc29
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/CancelJob.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message.job;
+
+/**
+ * Message to cancel the currently running job. This is used to stop execution of a job from the API
+ */
+public class CancelJob {
+  private final String jobId;
+  private final String username;
+
+  public CancelJob(String jobId, String username) {
+    this.jobId = jobId;
+    this.username = username;
+  }
+
+  public String getJobId() {
+    return jobId;
+  }
+
+  public String getUsername() {
+    return username;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/ExecuteNextStatement.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/ExecuteNextStatement.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/ExecuteNextStatement.java
new file mode 100644
index 0000000..ac48767
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/ExecuteNextStatement.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message.job;
+
+public class ExecuteNextStatement {
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/ExecutionFailed.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/ExecutionFailed.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/ExecutionFailed.java
new file mode 100644
index 0000000..15bcbf5
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/ExecutionFailed.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message.job;
+
+public class ExecutionFailed extends Failure {
+
+  public ExecutionFailed(String message, Throwable error) {
+    super(message, error);
+  }
+
+  public ExecutionFailed(String message) {
+    super(message, new Exception(message));
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/Failure.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/Failure.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/Failure.java
new file mode 100644
index 0000000..1efb132
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/Failure.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message.job;
+
+public class Failure {
+  private final Throwable error;
+  private final String message;
+
+  public Failure(String message, Throwable error) {
+    this.message = message;
+    this.error = error;
+  }
+
+  public Throwable getError() {
+    return error;
+  }
+
+  public String getMessage() {
+    return message;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/FetchFailed.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/FetchFailed.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/FetchFailed.java
new file mode 100644
index 0000000..c86867a
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/FetchFailed.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message.job;
+
+public class FetchFailed extends Failure {
+
+  public FetchFailed(String message, Throwable error) {
+    super(message, error);
+  }
+
+  public FetchFailed(String message) {
+    this(message, new Exception(message));
+  }
+
+}
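
Taken together, Failure, ExecutionFailed and FetchFailed form a small hierarchy of error messages: the subclasses only mark which phase (statement execution vs. result fetching) the error belongs to. A minimal usage sketch, not part of this patch, where 'replyTo', 'self' and runStatement() are assumptions for illustration:

  import java.sql.SQLException;
  import akka.actor.ActorRef;
  import org.apache.ambari.view.hive20.actor.message.job.ExecutionFailed;

  // Sketch only: runStatement() stands in for the real HiveStatement.execute(sql) call.
  class StatementRunner {
    void runAndReport(ActorRef replyTo, ActorRef self, String sql) {
      try {
        runStatement(sql);
      } catch (SQLException e) {
        // The failure message carries both a human-readable message and the cause.
        replyTo.tell(new ExecutionFailed("Failed to execute: " + sql, e), self);
      }
    }

    void runStatement(String sql) throws SQLException {
      // placeholder for the real JDBC call
    }
  }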

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/Next.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/Next.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/Next.java
new file mode 100644
index 0000000..0fc99ac
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/Next.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message.job;
+
+public class Next {
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/NoMoreItems.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/NoMoreItems.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/NoMoreItems.java
new file mode 100644
index 0000000..639ac05
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/NoMoreItems.java
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message.job;
+
+public class NoMoreItems {}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/NoResult.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/NoResult.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/NoResult.java
new file mode 100644
index 0000000..7b43ae8
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/NoResult.java
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message.job;
+
+public class NoResult {}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/Result.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/Result.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/Result.java
new file mode 100644
index 0000000..f8c3ba0
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/Result.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message.job;
+
+import com.google.common.collect.ImmutableList;
+import org.apache.ambari.view.hive20.client.ColumnDescription;
+import org.apache.ambari.view.hive20.client.Row;
+
+import java.util.List;
+
+public class Result {
+  private final List<ColumnDescription> columns;
+  private final List<Row> rows;
+
+  public Result(List<Row> rows, List<ColumnDescription> columns) {
+    this.rows = ImmutableList.copyOf(rows);
+    this.columns = columns;
+  }
+
+  public List<Row> getRows() {
+    return rows;
+  }
+
+  public List<ColumnDescription> getColumns() {
+    return columns;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/ResultSetHolder.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/ResultSetHolder.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/ResultSetHolder.java
new file mode 100644
index 0000000..8089d26
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/ResultSetHolder.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message.job;
+
+import akka.actor.ActorRef;
+
+public class ResultSetHolder {
+  private final ActorRef iterator;
+
+  public ResultSetHolder(ActorRef iterator) {
+    this.iterator = iterator;
+  }
+
+  public ActorRef getIterator() {
+    return iterator;
+  }
+}
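
ResultSetHolder hands the requester a reference to the actor that iterates over the JDBC result set; that iterator then answers Next requests with Result pages (the rows are copied into an ImmutableList) until it signals NoMoreItems. A minimal sketch of that exchange, not part of this patch, with 'requester', 'iterator' and the page/columns arguments as assumptions:

  import java.util.List;
  import akka.actor.ActorRef;
  import org.apache.ambari.view.hive20.actor.message.job.NoMoreItems;
  import org.apache.ambari.view.hive20.actor.message.job.Result;
  import org.apache.ambari.view.hive20.actor.message.job.ResultSetHolder;
  import org.apache.ambari.view.hive20.client.ColumnDescription;
  import org.apache.ambari.view.hive20.client.Row;

  class ResultPaging {
    // Hand out the iterator reference once the query has produced a result set.
    void announce(ActorRef requester, ActorRef iterator, ActorRef self) {
      requester.tell(new ResultSetHolder(iterator), self);
    }

    // Answer one Next request with a page of rows, or signal exhaustion.
    void onNext(ActorRef requester, ActorRef self,
                List<Row> page, List<ColumnDescription> columns) {
      if (page.isEmpty()) {
        requester.tell(new NoMoreItems(), self);
      } else {
        requester.tell(new Result(page, columns), self);   // rows are copied defensively
      }
    }
  }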

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/SaveDagInformation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/SaveDagInformation.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/SaveDagInformation.java
new file mode 100644
index 0000000..0c16c99
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/SaveDagInformation.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message.job;
+
+/**
+ * Message to save the DAG information (dagName, dagId and applicationId) for a job.
+ */
+public class SaveDagInformation {
+  private final String jobId;
+  private final String dagName;
+  private final String dagId;
+  private final String applicationId;
+
+  public SaveDagInformation(String jobId, String dagName, String dagId, String applicationId) {
+    this.jobId = jobId;
+    this.dagName = dagName;
+    this.dagId = dagId;
+    this.applicationId = applicationId;
+  }
+
+  public String getJobId() {
+    return jobId;
+  }
+
+  public String getDagName() {
+    return dagName;
+  }
+
+  public String getDagId() {
+    return dagId;
+  }
+
+  public String getApplicationId() {
+    return applicationId;
+  }
+}
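
SaveDagInformation is a plain immutable carrier; a sender typically fills it from the Tez/ATS data it has just read and tells it to the job's connector actor. A minimal construction sketch, not part of this patch; every identifier below is invented for illustration:

  import akka.actor.ActorRef;
  import org.apache.ambari.view.hive20.actor.message.job.SaveDagInformation;

  class DagInfoSender {
    void send(ActorRef jdbcConnector) {               // 'jdbcConnector' is an assumed ActorRef
      SaveDagInformation info = new SaveDagInformation(
          "42",                                        // Hive view job id (assumed)
          "hive_20170103114300_example",               // DAG name (assumed)
          "dag_1483439000000_0001_1",                  // DAG id (assumed)
          "application_1483439000000_0001");           // YARN application id (assumed)
      jdbcConnector.tell(info, ActorRef.noSender());
    }
  }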

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/SaveGuidToDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/SaveGuidToDB.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/SaveGuidToDB.java
new file mode 100644
index 0000000..a55b02d
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/SaveGuidToDB.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message.job;
+
+/**
+ * Message asking the JdbcConnector to persist the GUID of the current statement for the job in the database.
+ */
+public class SaveGuidToDB {
+  private final int statementId;
+  private final String guid;
+  private final String jobId;
+
+  public SaveGuidToDB(int statementId, String guid, String jobId) {
+    this.statementId = statementId;
+    this.guid = guid;
+    this.jobId = jobId;
+  }
+
+  public int getStatementId() {
+    return statementId;
+  }
+
+  public String getGuid() {
+    return guid;
+  }
+
+  public String getJobId() {
+    return jobId;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/UpdateYarnAtsGuid.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/UpdateYarnAtsGuid.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/UpdateYarnAtsGuid.java
new file mode 100644
index 0000000..0af95ef
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/job/UpdateYarnAtsGuid.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message.job;
+
+import org.apache.hive.jdbc.HiveStatement;
+
+public class UpdateYarnAtsGuid {
+  private final int statementId;
+  private final HiveStatement statement;
+  private final String jobId;
+  public UpdateYarnAtsGuid(int statementId, HiveStatement statement, String jobId) {
+    this.statementId = statementId;
+    this.statement = statement;
+    this.jobId = jobId;
+  }
+
+  public int getStatementId() {
+    return statementId;
+  }
+
+  public HiveStatement getStatement() {
+    return statement;
+  }
+
+  public String getJobId() {
+    return jobId;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/lifecycle/CleanUp.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/lifecycle/CleanUp.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/lifecycle/CleanUp.java
new file mode 100644
index 0000000..d16364e
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/lifecycle/CleanUp.java
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message.lifecycle;
+
+public class CleanUp {}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/lifecycle/DestroyConnector.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/lifecycle/DestroyConnector.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/lifecycle/DestroyConnector.java
new file mode 100644
index 0000000..1f22a3b
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/message/lifecycle/DestroyConnector.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.actor.message.lifecycle;
+
+public class DestroyConnector {
+  private final String username;
+  private final String jobId;
+  private final boolean forAsync;
+
+  public DestroyConnector(String username, String jobId, boolean forAsync) {
+    this.username = username;
+    this.jobId = jobId;
+    this.forAsync = forAsync;
+  }
+
+  public String getUsername() {
+    return username;
+  }
+
+  public String getJobId() {
+    return jobId;
+  }
+
+  public boolean isForAsync() {
+    return forAsync;
+  }
+
+  @Override
+  public String toString() {
+    return "DestroyConnector{" +
+      "username='" + username + '\'' +
+      ", jobId='" + jobId + '\'' +
+      ", forAsync=" + forAsync +
+      '}';
+  }
+}
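
DestroyConnector is the lifecycle counterpart to the job messages above: once a job's work is done, the controlling actor is asked to tear down the per-user, per-job connector. A minimal sketch, not part of this patch; the 'operationController' reference and the literal values are assumptions:

  import akka.actor.ActorRef;
  import org.apache.ambari.view.hive20.actor.message.lifecycle.DestroyConnector;

  class ConnectorCleanup {
    // Ask the controlling actor to drop the connector that served an async job.
    void destroy(ActorRef operationController) {
      operationController.tell(
          new DestroyConnector("admin", "42", true),   // username, jobId, forAsync (assumed)
          ActorRef.noSender());
    }
  }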