Posted to commits@ambari.apache.org by pa...@apache.org on 2016/02/26 10:59:19 UTC

[1/3] ambari git commit: AMBARI-15046. Hive view Upload table feature now supports upload from HDFS and Local. Supports JSON/XML/CSV as uploaded files, and ORC and all other Hive internal storage types as target formats. (Nitiraj Rathore via pallavkul)

Repository: ambari
Updated Branches:
  refs/heads/branch-2.2 fc8f637b5 -> 0747b6c77


http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/upload-table.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/upload-table.hbs b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/upload-table.hbs
index d811e34..b7d9011 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/upload-table.hbs
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/upload-table.hbs
@@ -16,77 +16,153 @@
 * limitations under the License.
 }}
 
-<div class="pull-right">
-  <i class="query-menu-tab fa queries-icon query-context-tab fa-envelope" {{ action 'toggleErrors'}}></i>
-</div>
-
 <div style="width : 90%">
-<div class="main-content">
-  {{#if showErrors}}
-    {{render 'messages'}}
-  {{/if}}
+  <div class="main-content">
+    {{#if showErrors}}
+      {{render 'messages'}}
+    {{/if}}
+  </div>
 </div>
+
+<div id="uploadProgressModal" class="modal fade" role="dialog" data-backdrop="static">
+  <div class="modal-dialog">
+
+    <!-- Modal content-->
+    <div class="modal-content">
+      <div class="modal-header">
+        <h4 class="modal-title">Upload Progress</h4>
+      </div>
+      <div class="modal-body">
+        <p>
+        <ul>
+          {{uploadProgressInfo}}
+        </ul>
+        </p>
+      </div>
+    </div>
+
+  </div>
 </div>
 
-{{#unless showErrors}}
-<div>
-  {{file-upload filesUploaded="filesUploaded"}}
+<div class="pull-right">
+  <i class="query-menu-tab fa queries-icon fa-envelope" {{ action 'toggleErrors'}}></i>
 </div>
+<div {{bind-attr class="showErrors:hide-data:show-data"}}>
+  <div>
+    <table class="table data-upload-form pull-left">
+      <tr>
+        <td class="data-upload-form-label"><label>Upload from Local</label></td>
+        <td  class="data-upload-form-field"> {{radio-button value='local' checked=uploadSource}}</td>
 
-  <!--<div class='fa query-menu-tab fa queries-icon query-context-tab fa-envelope'></div>-->
+        <td class="data-upload-form-label"><label>Upload from HDFS</label></td>
+        <td  class="data-upload-form-field">{{radio-button value='hdfs' checked=uploadSource}}</td>
+      </tr>
+      <tr>
+        <td class="data-upload-form-label"><label>File type</label></td>
+        <td class="data-upload-form-field">
+          {{typeahead-widget
+          content=inputFileTypes
+          optionValuePath="id"
+          optionLabelPath="name"
+          selection=inputFileType
+          placeholder="Select File Type"}}
+        </td>
+
+
+        {{#if isLocalUpload }}
+          <td class="data-upload-form-label"><label>Select from local</label></td>
+          <td class="data-upload-form-field">{{file-upload  filesUploaded="filesUploaded"}}</td>
+        {{else}}
+          <td class="data-upload-form-label"><label>HDFS Path</label></td>
+          <td class="data-upload-form-field" id="hdfs-param">{{input type="text" class="form-control" placeholder="Enter full HDFS path" value=hdfsPath }}
+            <button style="margin-left: 5px; padding-top: 6px;padding-bottom: 6px; padding-right: 10px; padding-left: 10px;" type="button" {{action "previewFromHdfs"}}
+            {{bind-attr class=":btn :btn-sm :btn-default"}}>{{t "buttons.showPreview"}}</button></td>
+        {{/if}}
+      </tr>
+      {{#if rows}}
+        <tr>
+          <td class="data-upload-form-label"><label>Database</label></td>
+          <td class="data-upload-form-field">
+            {{typeahead-widget
+            content=controllers.databases.databases
+            optionValuePath="id"
+            optionLabelPath="name"
+            selection=selectedDatabase
+            placeholder="Select a Database"
+            }}
+          </td>
+
+          <td class="data-upload-form-label"><label>Table name</label></td>
+          <td
+            class="data-upload-form-field">{{input type="text" class="form-control" placeholder="Table Name" value=tableName }}</td>
+        </tr>
+        <tr>
+          <td class="data-upload-form-label"><label>Stored as</label></td>
+          <td class="data-upload-form-field">
+            {{typeahead-widget
+            content=fileTypes
+            selection=selectedFileType}}
+          </td>
+          {{#if inputFileTypeCSV }}
+            <td class="data-upload-form-label"><label>Is first row header ?</label></td>
+            <td class="data-upload-form-field">
+              {{input id="isFirstRowHeader" type="checkbox" checked=isFirstRowHeader }}
+            </td>
+
+          {{/if}}
+        </tr>
+      {{/if}}
+    </table>
+
+    <table class="pull-right">
+      <tr>
+        <td>
+          {{#if rows}}
+            <button type="button" {{action "uploadTable"}}
+              {{bind-attr class=":btn :btn-sm :btn-default"}}>{{t "buttons.uploadTable"}}</button>
+          {{/if}}
+        </td>
+      </tr>
+    </table>
 
-  {{#if rows}}
-  <div class="query-results-tools">
-    <div class="pull-right">
-      <button type="button" {{action
-      "createTableAndUploadFile"}}
-      {{bind-attr class=":btn :btn-sm :btn-default"}}>{{t "buttons.uploadTable"}}</button>
-    </div>
   </div>
 
   <div>
-    <div class="col-md-3">Database :
-      {{typeahead-widget
-      content=controllers.databases.databases
-      optionValuePath="id"
-      optionLabelPath="name"
-      selection=selectedDatabase
-      placeholder="Select a Database"
-      }}
-    </div>
-    <div class="col-md-3">Table Name : {{input type="text" class="form-control" placeHolder="Table Name" value=tableName }}
-    </div>
-    <div class="col-md-3">Is First Row Header? :{{input id="isFirstRowHeader" type="checkbox" class="form-control" checked=isFirstRowHeader }}
-    </div>
+    {{#if rows}}
+      <div id="upload-table">
+        <table class="table table-expandable no-border">
+          <thead>
+          <tr>
+            {{#each column in header}}
+              <th>{{input placeholder="column-name" type="text" class="form-control" value=column.name}}</th>
+            {{/each}}
+          </tr>
+          <tr id="upload-controls">
+            {{#each column in header}}
+              <th>
+                <table>
+                  <tbody>
+                  <tr>
+                    <td>{{typeahead-widget content=dataTypes selection=column.type }}</td>
+                    {{input-header column=column dataTypes=dataTypes}}
+                  </tr>
+                  </tbody>
+                </table>
+              </th>
+            {{/each}}
+          </tr>
+          </thead>
+          <tbody>
+          {{#each row in rows}}
+            <tr>
+              {{#each item in row.row}}
+                <td>{{item}}</td>
+              {{/each}}
+            </tr>
+          {{/each}}
+          </tbody>
+        </table>
+      </div>
+    {{/if}}
   </div>
-<div id="upload-table">
-  <table class="table table-expandable">
-    <thead>
-    <tr>
-      {{#each column in header}}
-      <th> {{input type="text" class="form-control" value=column.name}}</th>
-      {{/each}}
-    </tr>
-    <tr>
-      {{#each column in header}}
-      <th> {{typeahead-widget
-        content=dataTypes
-        selection=column.type
-        }}
-      </th>
-      {{/each}}
-    </tr>
-    </thead>
-    <tbody>
-    {{#each row in rows}}
-    <tr>
-      {{#each item in row.row}}
-      <td>{{item}}</td>
-      {{/each}}
-    </tr>
-    {{/each}}
-    </tbody>
-  </table>
-</div>
-{{/if}}
-{{/unless}}
\ No newline at end of file
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/CSVParserTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/CSVParserTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/CSVParserTest.java
new file mode 100644
index 0000000..fb4c4da
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/CSVParserTest.java
@@ -0,0 +1,144 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.upload;
+
+import com.google.gson.JsonArray;
+import com.google.gson.JsonObject;
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.ambari.view.hive.resources.uploads.parsers.csv.CSVParser;
+import org.apache.ambari.view.hive.resources.uploads.parsers.json.JSONParser;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.Iterator;
+
+public class CSVParserTest {
+
+  /**
+   * No exception when creating a CSVParser from an empty stream.
+   * @throws IOException
+   */
+  @Test
+  public void testEmptyStream() throws IOException {
+    String csv = "";
+
+    StringReader sr = new StringReader(csv);
+
+    CSVParser jp = null;
+
+    try {
+
+      jp = new CSVParser(sr, null);
+
+      Assert.assertEquals("There should not be any rows.",false, jp.iterator().hasNext());
+
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+
+  /**
+   * In CSV, a line containing only whitespace is still considered a row.
+   * @throws IOException
+   */
+  @Test
+  public void testEmptyRow() throws IOException {
+    String csv = "       ";
+    StringReader sr = new StringReader(csv);
+
+    CSVParser jp = null;
+
+    try {
+      jp = new CSVParser(sr, null);
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator should be Empty", true, iterator.hasNext());
+      Assert.assertArrayEquals("Row should not be empty",new Object[]{"       "},iterator.next().getRow());
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+
+  @Test
+  public void testParse1Row() throws IOException {
+    String csv = "value1,c,10,10.1";
+
+    StringReader sr = new StringReader(csv);
+
+    CSVParser jp = null;
+
+    try {
+      jp = new CSVParser(sr, null);
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator Empty!", true, iterator.hasNext());
+      Row row = iterator.next();
+      Row expected = new Row(new Object[]{"value1", "c", "10", "10.1"});
+      Assert.assertEquals("Row not equal!", expected, row);
+
+      Assert.assertEquals("Should report no more rows!", false, iterator.hasNext());
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+
+  @Test
+  public void testParseMultipleRow() throws IOException {
+
+    String csv = "value1,c,10,10.1\n" +
+            "value2,c2,102,true";
+
+    StringReader sr = new StringReader(csv);
+
+    CSVParser jp = null;
+
+    try {
+      jp = new CSVParser(sr, null);
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", new Row(new Object[]{"value1", "c", "10", "10.1"}), iterator.next());
+
+      Assert.assertEquals("Failed to detect 2nd row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 2nd row!", new Row(new Object[]{"value2", "c2", "102", Boolean.TRUE.toString()}), iterator.next());
+
+      Assert.assertEquals("Failed to detect end of rows!", false, iterator.hasNext());
+      Assert.assertEquals("Failed to detect end of rows 2nd time!", false, iterator.hasNext());
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+}
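
For reference, the pattern these tests exercise — construct the parser over a Reader, iterate the Row objects, and close both — looks like this in caller code. A minimal sketch, using only the CSVParser, Row, and iterator behavior visible in the tests above (the class name is hypothetical; the tests pass null for the ParseOptions):

import org.apache.ambari.view.hive.client.Row;
import org.apache.ambari.view.hive.resources.uploads.parsers.csv.CSVParser;

import java.io.IOException;
import java.io.StringReader;
import java.util.Arrays;
import java.util.Iterator;

public class CsvParseSketch {
  public static void main(String[] args) throws IOException {
    StringReader reader = new StringReader("value1,c,10,10.1\nvalue2,c2,102,true");
    CSVParser parser = null;
    try {
      parser = new CSVParser(reader, null);  // null ParseOptions, as in the tests above
      Iterator<Row> rows = parser.iterator();
      while (rows.hasNext()) {
        // Each Row wraps an Object[] holding the column values of one CSV record.
        System.out.println(Arrays.toString(rows.next().getRow()));
      }
    } finally {
      if (parser != null)
        parser.close();
      reader.close();
    }
  }
}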

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserCSVTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserCSVTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserCSVTest.java
new file mode 100644
index 0000000..479c934
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserCSVTest.java
@@ -0,0 +1,214 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.upload;
+
+import com.google.gson.JsonObject;
+import com.google.gson.stream.JsonReader;
+import com.google.gson.stream.JsonToken;
+import com.google.gson.stream.JsonWriter;
+import org.apache.ambari.view.hive.client.ColumnDescription;
+import org.apache.ambari.view.hive.client.ColumnDescriptionShort;
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.ambari.view.hive.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive.resources.uploads.parsers.DataParser;
+import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive.resources.uploads.parsers.PreviewData;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.StringReader;
+
+public class DataParserCSVTest {
+  @Test
+  public void testParsePreviewCSV() throws IOException {
+    String str = "1,a\n" +
+            "2,b\n" +
+            "3,c\n";
+    StringReader sr = new StringReader(str);
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+    DataParser dp = null;
+    try {
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(3, pd.getPreviewRows().size());
+      Assert.assertEquals(2, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("1", ColumnDescriptionShort.DataTypes.INT.toString(), 0),
+              new ColumnDescriptionImpl("a", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1)};
+
+      Object cols1[] = new Object[2];
+      cols1[0] = "1";
+      cols1[1] = "a";
+      Row row1 = new Row(cols1);
+
+      Object cols2[] = new Object[2];
+      cols2[0] = "2";
+      cols2[1] = "b";
+      Row row2 = new Row(cols2);
+
+      Object cols3[] = new Object[2];
+      cols3[0] = "3";
+      cols3[1] = "c";
+      Row row3 = new Row(cols3);
+
+      Row[] rows = {row1, row2, row3};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+
+  /**
+   * More columns in a row => ignore the extra columns. The number of columns is decided by the first row;
+   * if a later row contains more columns, the extra ones are ignored.
+   * Here the first row has 2 columns and the second row has 3, so the value 'x' is ignored.
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewCSVMoreColumns() throws IOException {
+    String str = "1,a\n" +
+            "2,b,x\n" +  // contains 3 cols, more number of columns
+            "3,c\n";
+    StringReader sr = new StringReader(str);
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+    DataParser dp = null;
+    try {
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+      Row row2 = new Row(new Object[]{"2","b"});
+
+      Assert.assertArrayEquals("Additional columns not properly handled.", row2.getRow(),pd.getPreviewRows().get(1).getRow());
+    } finally {
+      if (null != dp) {
+        dp.close();
+      }
+
+      sr.close();
+    }
+  }
+
+  /**
+   * Fewer columns => treat missing values as null. The number of columns is decided by the first row of the table;
+   * if a later row has fewer columns, the missing values are treated as null.
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewCSVLessColumns() throws IOException {
+    String str = "1,a\n" +
+            "2\n" +  // contains 1 col, less number of columns
+            "3,c\n";
+    StringReader sr = new StringReader(str);
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+//    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+    DataParser dp = null;
+    try {
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertEquals("Missing value not detected as null.",pd.getPreviewRows().get(1).getRow()[1],null);
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+
+  /**
+   * Empty values are treated as empty strings.
+   * @throws IOException
+   */
+  @Test
+  public void testEmptyColumn() throws IOException {
+    String str = "1,a,x\n" +
+            "2,,y\n" +  // contains 1 col, less number of columns
+            "3,c,z\n";
+//    System.out.println("str : " + str);
+    StringReader sr = new StringReader(str);
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+    DataParser dp = null;
+    try {
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertEquals("Empty column not detected properly.",pd.getPreviewRows().get(1).getRow()[1],"");
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+
+  /**
+   * Empty values are treated as empty strings, including when they occur in the last column.
+   * @throws IOException
+   */
+  @Test
+  public void testLastEmptyColumn() throws IOException {
+    String str = "1,a,x\n" +
+            "2,,\n" +  // contains 1 col, less number of columns
+            "3,c,z\n";
+//    System.out.println("str : " + str);
+    StringReader sr = new StringReader(str);
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+    DataParser dp = null;
+    try {
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertEquals("Empty column not detected properly.",pd.getPreviewRows().get(1).getRow()[1],"");
+      Assert.assertEquals("Empty column not detected properly.",pd.getPreviewRows().get(1).getRow()[2],"");
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+}
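
The preview flow these tests drive is the same for every input type: set the file type (and optionally the header mode) on ParseOptions, wrap the source Reader in a DataParser, and call parsePreview(). A minimal sketch assembled only from the calls that appear in the tests above; the class name and sample data are hypothetical, and it assumes getPreviewRows() returns a List<Row>, as the size()/get() calls in the tests suggest:

import org.apache.ambari.view.hive.client.Row;
import org.apache.ambari.view.hive.resources.uploads.parsers.DataParser;
import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;
import org.apache.ambari.view.hive.resources.uploads.parsers.PreviewData;

import java.io.IOException;
import java.io.StringReader;
import java.util.Arrays;

public class CsvPreviewSketch {
  public static void main(String[] args) throws IOException {
    StringReader reader = new StringReader("1,a\n2,b\n3,c\n");

    ParseOptions parseOptions = new ParseOptions();
    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
    // Treat the first record as the header; column names and types are inferred from it.
    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());

    DataParser dataParser = null;
    try {
      dataParser = new DataParser(reader, parseOptions);
      PreviewData preview = dataParser.parsePreview();
      System.out.println(preview.getHeader().size() + " columns");
      for (Row row : preview.getPreviewRows()) {
        System.out.println(Arrays.toString(row.getRow()));
      }
    } finally {
      if (dataParser != null)
        dataParser.close();
      reader.close();
    }
  }
}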

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserJSONTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserJSONTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserJSONTest.java
new file mode 100644
index 0000000..555d8e3
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserJSONTest.java
@@ -0,0 +1,209 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.upload;
+
+import org.apache.ambari.view.hive.client.ColumnDescription;
+import org.apache.ambari.view.hive.client.ColumnDescriptionShort;
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.ambari.view.hive.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive.resources.uploads.parsers.DataParser;
+import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive.resources.uploads.parsers.PreviewData;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+
+public class DataParserJSONTest {
+
+  @Test
+  public void testParsePreviewJSON() throws IOException {
+    String str = "[ {\"col1\" : \"a\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"4.4\" },"
+            + "{\"col1\": \"b\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"5.4\" },"
+            + "{\"col1\": \"c\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"6.4\" },"
+            + "{\"col1\": \"d\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"7.4\" },"
+            + "{\"col1\": \"e\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"8.4\" },"
+            + "{\"col1\": \"f\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"9.4\" },"
+            + "{\"col1\": \"g\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"10.4\" },"
+            + "{\"col1\": \"h\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"11.4\" },"
+            + "{\"col1\": \"i\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"4\" },"
+            + "{\"col1\": \"j\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"5\" },"
+            + "{\"col1\": \"k\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"6\" },"
+            + "{\"col1\": \"l\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"7\" },"
+            + "{\"col1\": \"m\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"24.4\" },"
+            + "{\"col1\": \"n\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"14.4\" },"
+            + "{\"col1\": \"o\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"34.4\" },"
+            + "{\"col1\": \"p\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"44.4\" },"
+            + "{\"col1\": \"q\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"54.4\" },"
+            + "{\"col1\": \"r\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"64.4\" }"
+            + "]";
+
+    StringReader sr = new StringReader(str);
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_NUMBER_OF_PREVIEW_ROWS, 7);
+
+    DataParser dp = null;
+    try {
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(8, pd.getPreviewRows().size()); // header row + preview rows
+      Assert.assertEquals(14, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.CHAR.toString(), 0),
+              new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1),
+              new ColumnDescriptionImpl("col3", ColumnDescriptionShort.DataTypes.STRING.toString(), 2),
+              new ColumnDescriptionImpl("col4", ColumnDescriptionShort.DataTypes.STRING.toString(), 3),
+              new ColumnDescriptionImpl("col5", ColumnDescriptionShort.DataTypes.STRING.toString(), 4),
+              new ColumnDescriptionImpl("col6", ColumnDescriptionShort.DataTypes.STRING.toString(), 5),
+              new ColumnDescriptionImpl("col7", ColumnDescriptionShort.DataTypes.STRING.toString(), 6),
+              new ColumnDescriptionImpl("col8", ColumnDescriptionShort.DataTypes.STRING.toString(), 7),
+              new ColumnDescriptionImpl("col9", ColumnDescriptionShort.DataTypes.STRING.toString(), 8),
+              new ColumnDescriptionImpl("col10", ColumnDescriptionShort.DataTypes.STRING.toString(), 9),
+              new ColumnDescriptionImpl("col11", ColumnDescriptionShort.DataTypes.STRING.toString(), 10),
+              new ColumnDescriptionImpl("col12", ColumnDescriptionShort.DataTypes.STRING.toString(), 11),
+              new ColumnDescriptionImpl("col13", ColumnDescriptionShort.DataTypes.STRING.toString(), 12),
+              new ColumnDescriptionImpl("col14", ColumnDescriptionShort.DataTypes.DOUBLE.toString(), 13)};
+
+      Row row1 = new Row(new Object[]{"col1", "col2", "col3", "col4", "col5", "col6", "col7", "col8", "col9", "col10", "col11", "col12", "col13", "col14"});
+      Row row2 = new Row(new Object[]{"a", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "4.4"});
+      Row row3 = new Row(new Object[]{"b", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "5.4"});
+      Row row4 = new Row(new Object[]{"c", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "6.4"});
+      Row row5 = new Row(new Object[]{"d", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "7.4"});
+      Row row6 = new Row(new Object[]{"e", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "8.4"});
+      Row row7 = new Row(new Object[]{"f", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "9.4"});
+      Row row8 = new Row(new Object[]{"g", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "10.4"});
+
+      Row[] rows = {row1, row2, row3, row4, row5, row6, row7, row8};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+
+  /**
+   * Additional columns in JSON rows are ignored.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewJSONMoreColumns() throws IOException {
+    String str = "[ {\"col1\" : \"a\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"4.4\" },"
+            + "{\"col1\": \"b\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" , \"col14\" : \"43.4\" ,\"col15\" : \"asafsfa\" },"
+            + "{\"col1\": \"c\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"6.4\" },"
+            + "{\"col1\": \"d\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"7.4\" }"
+            + "]";
+
+    StringReader sr = new StringReader(str);
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
+
+    DataParser dp = null;
+    try {
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+
+      Row row2 = new Row(new Object[]{"b", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "43.4"});
+      Assert.assertArrayEquals("More number of columns do not give correct result.", row2.getRow(), pd.getPreviewRows().get(1).getRow());
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+
+  /**
+   * Missing columns in JSON rows become null.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewJSONLessColumns() throws IOException {
+    String str = "[ " +
+            "{\"col1\" : \"a\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"4.4\" },"
+            + "{\"col1\": \"b\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\"  },"
+            + "{\"col1\": \"c\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"6.4\" },"
+            + "{\"col1\": \"d\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"7.4\" }"
+            + "]";
+
+    StringReader sr = new StringReader(str);
+    DataParser dp = null;
+    try {
+      ParseOptions parseOptions = new ParseOptions();
+      parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
+      parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+
+      Assert.assertNull(pd.getPreviewRows().get(2).getRow()[13]);
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+
+  /**
+   * Illegal JSON format raises an error.
+   *
+   * @throws IOException
+   */
+  @Test(expected = IllegalArgumentException.class)
+  public void testWrongJsonFormat() throws IOException {
+    String str = "[ " +
+            "{\"col1\" : \"a\", \n\"col2\": \"abcd\" },"
+            + "{\"col1\": \"b\", \n\"col2\": \"abcd\" },"
+            + "{\"col1\": \"c\", \n\"col2\": \"abcd\"  },"
+            + "{\"col1\": \"d\",, \n\"col2\": \"abcd\"  }"       // extra comma in this line
+            + "]";
+    DataParser dp = null;
+    StringReader sr = new StringReader(str);
+
+    try {
+      ParseOptions parseOptions = new ParseOptions();
+      parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
+      parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+}
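
The input these tests feed in is always a single top-level JSON array of flat objects; the keys of the first object become the header, and the first preview row is the header row itself. The same kind of fixture can be built programmatically with Gson, as JsonParserTest further below does — a sketch under the same API assumptions (the class name is hypothetical):

import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import org.apache.ambari.view.hive.resources.uploads.parsers.DataParser;
import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;
import org.apache.ambari.view.hive.resources.uploads.parsers.PreviewData;

import java.io.IOException;
import java.io.StringReader;

public class JsonPreviewSketch {
  public static void main(String[] args) throws IOException {
    JsonObject first = new JsonObject();
    first.addProperty("col1", "a");
    first.addProperty("col2", "4.4");
    JsonObject second = new JsonObject();
    second.addProperty("col1", "b");
    second.addProperty("col2", "5.4");
    JsonArray table = new JsonArray();
    table.add(first);
    table.add(second);

    StringReader reader = new StringReader(table.toString());
    ParseOptions parseOptions = new ParseOptions();
    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());

    DataParser dataParser = null;
    try {
      dataParser = new DataParser(reader, parseOptions);
      PreviewData preview = dataParser.parsePreview();
      // Header row ("col1", "col2") plus two data rows, as in testParsePreviewJSON above.
      System.out.println(preview.getPreviewRows().size() + " preview rows, "
          + preview.getHeader().size() + " columns");
    } finally {
      if (dataParser != null)
        dataParser.close();
      reader.close();
    }
  }
}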

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserTest.java
deleted file mode 100644
index 3d77d29..0000000
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserTest.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.resources.upload;
-
-import org.apache.ambari.view.hive.client.ColumnDescription;
-import org.apache.ambari.view.hive.client.ColumnDescriptionShort;
-import org.apache.ambari.view.hive.client.Row;
-import org.apache.ambari.view.hive.resources.uploads.ColumnDescriptionImpl;
-import org.apache.ambari.view.hive.resources.uploads.DataParser;
-import org.apache.ambari.view.hive.resources.uploads.ParseOptions;
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.io.IOException;
-import java.io.StringReader;
-import java.util.ArrayList;
-import java.util.Arrays;
-
-public class DataParserTest {
-
-  @Test
-  public void testDataParser() throws IOException {
-    String str = "1,a\n" +
-            "2,b\n" +
-            "3,c\n";
-    StringReader sr = new StringReader(str);
-
-    ParseOptions parseOptions = new ParseOptions();
-    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.FILE_TYPE_CSV);
-    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER_FIRST_RECORD);
-
-    DataParser dp = new DataParser(sr, parseOptions);
-    dp.parsePreview();
-    Assert.assertNotNull(dp.getPreviewRows());
-    Assert.assertNotNull(dp.getHeader());
-    Assert.assertEquals(3, dp.getPreviewRows().size());
-    Assert.assertEquals(2, dp.getHeader().size());
-    ColumnDescription[] cd = {new ColumnDescriptionImpl("1", ColumnDescriptionShort.DataTypes.INT.toString(), 0),
-            new ColumnDescriptionImpl("a", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1)};
-
-    Assert.assertArrayEquals("Header Not Correct.", cd, dp.getHeader().toArray());
-
-    // TODO : include testing of each row element. Below comparison does not work properly.
-    // Object[] rows = {new Row(new Object[]{'1','a'}),new Row(new Object[]{'2','b'}),new Row(new Object[]{'3','c'})};
-    // Assert.assertArrayEquals("Rows Not Correct.", rows, dp.getPreviewRows().toArray());
-
-    sr.close();
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserXMLTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserXMLTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserXMLTest.java
new file mode 100644
index 0000000..f925dc3
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserXMLTest.java
@@ -0,0 +1,233 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.upload;
+
+import org.apache.ambari.view.hive.client.ColumnDescription;
+import org.apache.ambari.view.hive.client.ColumnDescriptionShort;
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.ambari.view.hive.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive.resources.uploads.parsers.DataParser;
+import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive.resources.uploads.parsers.PreviewData;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+
+public class DataParserXMLTest {
+
+  @Test
+  public void testParsePreviewXML() throws IOException {
+    String str = "<table>" +
+            "<row>" +
+            "<col name=\"col1\">row1-col1-Value</col>" +
+            "<col name=\"col2\">row1-col2-Value</col>" +
+            "<col name=\"col3\">row1-col3-Value</col>" +
+            "<col name=\"col4\">10</col>" +
+            "<col name=\"col5\">11</col>" +
+            "</row>" +
+            "<row>" +
+            "<col name=\"col1\">row2-col1-Value</col>" +
+            "<col name=\"col2\">row2-col2-Value</col>" +
+            "<col name=\"col3\">row2-col3-Value</col>" +
+            "<col name=\"col4\">20</col>" +
+            "<col name=\"col5\">21</col>" +
+            "</row>" +
+            "</table>";
+
+    StringReader sr = new StringReader(str);
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+    DataParser dp = null;
+    try {
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(3, pd.getPreviewRows().size()); // header row + preview rows
+      Assert.assertEquals(5, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.STRING.toString(), 0),
+              new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1),
+              new ColumnDescriptionImpl("col3", ColumnDescriptionShort.DataTypes.STRING.toString(), 2),
+              new ColumnDescriptionImpl("col4", ColumnDescriptionShort.DataTypes.INT.toString(), 3),
+              new ColumnDescriptionImpl("col5", ColumnDescriptionShort.DataTypes.INT.toString(), 4)
+      };
+
+      Row row1 = new Row(new Object[]{"col1", "col2", "col3", "col4", "col5"});
+      Row row2 = new Row(new Object[]{"row1-col1-Value", "row1-col2-Value", "row1-col3-Value", "10", "11"});
+      Row row3 = new Row(new Object[]{"row2-col1-Value", "row2-col2-Value", "row2-col3-Value", "20", "21"});
+
+      Row[] rows = {row1, row2, row3};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+
+
+  /**
+   * Additional columns in XML rows are ignored.
+   * The number of columns is decided by the first row of the table; here the second row contains extra columns, so they are ignored.
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewXMLMoreColumns() throws IOException {
+    String str ="<table>" +
+            "<row>" +
+            "<col name=\"col1\">row1-col1-Value</col>" +
+            "<col name=\"col2\">row1-col2-Value</col>" +
+            "<col name=\"col3\">row1-col3-Value</col>" +
+            "<col name=\"col4\">10</col>" +
+            "<col name=\"col5\">11</col>" +
+            "</row>" +
+            "<row>" +
+            "<col name=\"col1\">row2-col1-Value</col>" +
+            "<col name=\"col2\">row2-col2-Value</col>" +
+            "<col name=\"col3\">row2-col3-Value</col>" +
+            "<col name=\"col99\">row2-col99-Value</col>" +  // extra colummn
+            "<col name=\"col100\">row2-col100-Value</col>" +  // extra column
+            "<col name=\"col4\">20</col>" +
+            "<col name=\"col5\">21</col>" +
+            "</row>" +
+            "</table>";
+
+    StringReader sr = new StringReader(str);
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString());
+
+    DataParser dp = null;
+    try {
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+
+      Row row2 = new Row(new Object[]{"row2-col1-Value","row2-col2-Value","row2-col3-Value","20","21"});
+      Assert.assertArrayEquals("More number of columns do not give correct result.", row2.getRow(), pd.getPreviewRows().get(1).getRow());
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+
+  /**
+   * Missing columns in XML rows become null.
+   * The number of columns is decided by the first row of the table; here the second row does not contain the col99 and
+   * col100 columns, so those are set to null.
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewXMLLessColumns() throws IOException {
+    String str = "<table>" +
+            "<row>" +
+            "<col name=\"col1\">row1-col1-Value</col>" +
+            "<col name=\"col2\">row1-col2-Value</col>" +
+            "<col name=\"col3\">row1-col3-Value</col>" +
+            "<col name=\"col99\">row2-col99-Value</col>" +  // extra colummn
+            "<col name=\"col100\">row2-col100-Value</col>" +  // extra column
+            "<col name=\"col4\">10</col>" +
+            "<col name=\"col5\">11</col>" +
+            "</row>" +
+            "<row>" +
+            "<col name=\"col1\">row2-col1-Value</col>" +
+            "<col name=\"col2\">row2-col2-Value</col>" +
+            "<col name=\"col3\">row2-col3-Value</col>" +
+            "<col name=\"col4\">20</col>" +
+            "<col name=\"col5\">21</col>" +
+            "</row>" +
+            "</table>";
+
+    StringReader sr = new StringReader(str);
+    DataParser dp = null;
+    try {
+      ParseOptions parseOptions = new ParseOptions();
+      parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString());
+      parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+
+      Row row2 = new Row(new Object[]{"row2-col1-Value","row2-col2-Value","row2-col3-Value",null,null,"20","21"});
+      Assert.assertArrayEquals("Less number of columns do not give correct result.", row2.getRow(), pd.getPreviewRows().get(2).getRow());
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+
+  /**
+   * Illegal XML format raises an error; adding an unexpected tag triggers it here.
+   *
+   * @throws IOException
+   */
+  @Test(expected = IllegalArgumentException.class)
+  public void testWrongXMLFormat() throws IOException {
+    String str = "<table>" +
+            "<row>" +
+            "<ccc></ccc>" +   // illegal tag.
+            "<col name=\"col1\">row1-col1-Value</col>" +
+            "<col name=\"col2\">row1-col2-Value</col>" +
+            "<col name=\"col3\">row1-col3-Value</col>" +
+            "<col name=\"col99\">row2-col99-Value</col>" +  // extra colummn
+            "<col name=\"col100\">row2-col100-Value</col>" +  // extra column
+            "<col name=\"col4\">10</col>" +
+            "<col name=\"col5\">11</col>" +
+            "</row>" +
+            "<row>" +
+            "<col name=\"col1\">row2-col1-Value</col>" +
+            "<col name=\"col2\">row2-col2-Value</col>" +
+            "<col name=\"col3\">row2-col3-Value</col>" +
+            "<col name=\"col4\">20</col>" +
+            "<col name=\"col5\">21</col>" +
+            "</row>" +
+            "</table>";
+    DataParser dp = null;
+    StringReader sr = new StringReader(str);
+
+    try {
+      ParseOptions parseOptions = new ParseOptions();
+      parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString());
+      parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+      dp = new DataParser(sr, parseOptions);
+
+      PreviewData pd = dp.parsePreview();
+    } finally {
+      if (null != dp)
+        dp.close();
+
+      sr.close();
+    }
+  }
+}
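
For XML, the expected shape is a <table> of <row> elements whose <col name="..."> children carry the values; the name attributes of the first row define the columns. A minimal sketch mirroring the tests above (class name and sample data hypothetical):

import org.apache.ambari.view.hive.resources.uploads.parsers.DataParser;
import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;
import org.apache.ambari.view.hive.resources.uploads.parsers.PreviewData;

import java.io.IOException;
import java.io.StringReader;

public class XmlPreviewSketch {
  public static void main(String[] args) throws IOException {
    String xml = "<table>"
        + "<row><col name=\"col1\">a</col><col name=\"col2\">10</col></row>"
        + "<row><col name=\"col1\">b</col><col name=\"col2\">20</col></row>"
        + "</table>";
    StringReader reader = new StringReader(xml);

    ParseOptions parseOptions = new ParseOptions();
    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString());
    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());

    DataParser dataParser = null;
    try {
      dataParser = new DataParser(reader, parseOptions);
      PreviewData preview = dataParser.parsePreview();
      // A header row plus the data rows, as asserted in testParsePreviewXML above.
      System.out.println(preview.getPreviewRows().size() + " preview rows, "
          + preview.getHeader().size() + " columns");
    } finally {
      if (dataParser != null)
        dataParser.close();
      reader.close();
    }
  }
}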

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/JsonParserTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/JsonParserTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/JsonParserTest.java
new file mode 100644
index 0000000..38ed225
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/JsonParserTest.java
@@ -0,0 +1,184 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.upload;
+
+import com.google.gson.JsonArray;
+import com.google.gson.JsonObject;
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.ambari.view.hive.resources.uploads.parsers.json.JSONParser;
+import org.apache.ambari.view.hive.resources.uploads.parsers.xml.XMLParser;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.Iterator;
+
+public class JsonParserTest {
+
+  @Test(expected = IOException.class)
+  public void testEmptyStream() throws IOException {
+    String json = "";
+
+    StringReader sr = new StringReader(json);
+
+    JSONParser jp = null;
+
+    try {
+
+      jp = new JSONParser(sr, null);
+
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+
+  @Test
+  public void testEmptyRow() throws IOException {
+    JsonObject jo = new JsonObject();
+    JsonArray ja = new JsonArray();
+    ja.add(jo);
+    String json = ja.toString();
+
+    StringReader sr = new StringReader(json);
+
+    JSONParser jp = null;
+
+    try {
+      jp = new JSONParser(sr, null);
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator should not be Empty", true, iterator.hasNext());
+      Assert.assertArrayEquals("Row should be empty",new Object[]{},iterator.next().getRow());
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+
+
+  @Test
+  public void testEmptyTable() throws IOException {
+    JsonArray ja = new JsonArray();
+    String json = ja.toString();
+
+    StringReader sr = new StringReader(json);
+
+    JSONParser jp = null;
+
+    try {
+      jp = new JSONParser(sr, null);
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator Empty!", false, iterator.hasNext());
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+
+  @Test
+  public void testParse1Row() throws IOException {
+    JsonObject jo = new JsonObject();
+    jo.addProperty("key1","value1");
+    jo.addProperty("key2",'c');
+    jo.addProperty("key3",10);
+    jo.addProperty("key4",10.1);
+
+    JsonArray ja = new JsonArray();
+    ja.add(jo);
+    String json = ja.toString();
+
+    StringReader sr = new StringReader(json);
+
+    JSONParser jp = null;
+
+    try {
+      jp = new JSONParser(sr, null);
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator Empty!", true, iterator.hasNext());
+      Row row = iterator.next();
+      Row expected = new Row(new Object[]{"value1", "c", "10", "10.1"});
+      Assert.assertEquals("Row not equal!", expected, row);
+
+      Assert.assertEquals("Should report no more rows!", false, iterator.hasNext());
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+
+  @Test
+  public void testParseMultipleRow() throws IOException {
+    JsonObject jo1 = new JsonObject();
+    jo1.addProperty("key1","value1");
+    jo1.addProperty("key2","c");
+    jo1.addProperty("key3","10");
+    jo1.addProperty("key4","10.1");
+
+    JsonObject jo2 = new JsonObject();
+    jo2.addProperty("key1","value2");
+    jo2.addProperty("key2","c2");
+    jo2.addProperty("key3","102");
+    jo2.addProperty("key4",true);
+
+
+    JsonArray ja = new JsonArray();
+    ja.add(jo1);
+    ja.add(jo2);
+
+    String json = ja.toString();
+    StringReader sr = new StringReader(json);
+
+    JSONParser jp = null;
+
+    try {
+      jp = new JSONParser(sr, null);
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", new Row(new Object[]{"value1", "c", "10", "10.1"}), iterator.next());
+
+      Assert.assertEquals("Failed to detect 2nd row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 2nd row!", new Row(new Object[]{"value2", "c2", "102", Boolean.TRUE.toString()}), iterator.next());
+
+      Assert.assertEquals("Failed to detect end of rows!", false, iterator.hasNext());
+      Assert.assertEquals("Failed to detect end of rows 2nd time!", false, iterator.hasNext());
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+}
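
The tests above repeat the same construct/iterate/close scaffolding. A minimal sketch of a helper that could factor out the draining step (the name collectRows is illustrative and not part of this patch; it assumes java.util.List, ArrayList and Iterator are imported):

  // Drains the parser's iterator into a list so assertions can run after cleanup.
  private static List<Row> collectRows(JSONParser parser) {
    List<Row> rows = new ArrayList<Row>();
    for (Iterator<Row> it = parser.iterator(); it.hasNext(); ) {
      rows.add(it.next());
    }
    return rows;
  }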

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/QueryGeneratorTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/QueryGeneratorTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/QueryGeneratorTest.java
new file mode 100644
index 0000000..5941aaaa
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/QueryGeneratorTest.java
@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.upload;
+
+import org.apache.ambari.view.hive.client.ColumnDescription;
+import org.apache.ambari.view.hive.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive.resources.uploads.HiveFileType;
+import org.apache.ambari.view.hive.resources.uploads.query.DeleteQueryInput;
+import org.apache.ambari.view.hive.resources.uploads.query.InsertFromQueryInput;
+import org.apache.ambari.view.hive.resources.uploads.query.QueryGenerator;
+import org.apache.ambari.view.hive.resources.uploads.query.TableInfo;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class QueryGeneratorTest {
+  @Test
+  public void testCreateTextFile() {
+
+    List<ColumnDescriptionImpl> cdl = new ArrayList<>(5);
+    cdl.add(new ColumnDescriptionImpl("col1", ColumnDescription.DataTypes.CHAR.toString(), 0, 10));
+    cdl.add(new ColumnDescriptionImpl("col2", ColumnDescription.DataTypes.STRING.toString(), 1));
+    cdl.add(new ColumnDescriptionImpl("col3", ColumnDescription.DataTypes.DECIMAL.toString(), 2, 10, 5));
+    cdl.add(new ColumnDescriptionImpl("col4", ColumnDescription.DataTypes.VARCHAR.toString(), 3, 40));
+    cdl.add(new ColumnDescriptionImpl("col5", ColumnDescription.DataTypes.INT.toString(), 4));
+
+    TableInfo ti = new TableInfo("databaseName", "tableName", cdl, HiveFileType.TEXTFILE);
+
+    QueryGenerator qg = new QueryGenerator();
+    Assert.assertEquals("Create query for text file not correct ","create table tableName (col1 CHAR(10), col2 STRING, col3 DECIMAL(10,5), col4 VARCHAR(40), col5 INT) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE;",qg.generateCreateQuery(ti));
+  }
+
+  @Test
+  public void testCreateORC() {
+
+    List<ColumnDescriptionImpl> cdl = new ArrayList<>(5);
+    cdl.add(new ColumnDescriptionImpl("col1", ColumnDescription.DataTypes.CHAR.toString(), 0, 10));
+    cdl.add(new ColumnDescriptionImpl("col2", ColumnDescription.DataTypes.STRING.toString(), 1));
+    cdl.add(new ColumnDescriptionImpl("col3", ColumnDescription.DataTypes.DECIMAL.toString(), 2, 10, 5));
+    cdl.add(new ColumnDescriptionImpl("col4", ColumnDescription.DataTypes.VARCHAR.toString(), 3, 40));
+    cdl.add(new ColumnDescriptionImpl("col5", ColumnDescription.DataTypes.INT.toString(), 4));
+
+    TableInfo ti = new TableInfo("databaseName", "tableName", cdl, HiveFileType.ORC);
+
+    QueryGenerator qg = new QueryGenerator();
+    Assert.assertEquals("Create query for text file not correct ","create table tableName (col1 CHAR(10), col2 STRING, col3 DECIMAL(10,5), col4 VARCHAR(40), col5 INT) STORED AS ORC;",qg.generateCreateQuery(ti));
+  }
+
+  @Test
+  public void testInsertFromQuery() {
+
+    InsertFromQueryInput ifqi = new InsertFromQueryInput("fromDB","fromTable","toDB","toTable");
+
+    QueryGenerator qg = new QueryGenerator();
+    Assert.assertEquals("insert from one table to another not correct ","insert into table toDB.toTable select * from fromDB.fromTable",qg.generateInsertFromQuery(ifqi));
+  }
+
+  @Test
+  public void testDropTableQuery() {
+
+    DeleteQueryInput deleteQueryInput = new DeleteQueryInput("dbName","tableName");
+
+    QueryGenerator qg = new QueryGenerator();
+    Assert.assertEquals("drop table query not correct ","drop table dbName.tableName",qg.generateDropTableQuery(deleteQueryInput ));
+  }
+}
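
For reference, a minimal sketch of driving QueryGenerator outside the tests, using only constructors exercised above (database, table and column names are made up):

  List<ColumnDescriptionImpl> cols = new ArrayList<ColumnDescriptionImpl>();
  cols.add(new ColumnDescriptionImpl("id", ColumnDescription.DataTypes.INT.toString(), 0));
  cols.add(new ColumnDescriptionImpl("name", ColumnDescription.DataTypes.STRING.toString(), 1));
  TableInfo ti = new TableInfo("default", "users", cols, HiveFileType.TEXTFILE);
  String createSql = new QueryGenerator().generateCreateQuery(ti);
  // per the expectations above, createSql is:
  // create table users (id INT, name STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE;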

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/TableDataReaderTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/TableDataReaderTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/TableDataReaderTest.java
new file mode 100644
index 0000000..d94eace
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/TableDataReaderTest.java
@@ -0,0 +1,127 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.upload;
+
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.ambari.view.hive.resources.uploads.TableDataReader;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Iterator;
+
+public class TableDataReaderTest {
+
+  private class RowIter implements Iterator<Row> {
+    int numberOfRows;
+    int numberOfCols;
+    int index = 0;
+    ArrayList<Row> rows = new ArrayList<Row>();
+    public RowIter(int numberOfRows, int numberOfCols){
+      this.numberOfRows = numberOfRows;
+      this.numberOfCols = numberOfCols;
+      int x = 0;
+      for (int i = 0; i < this.numberOfRows; i++) {
+        Object[] objArray = new Object[this.numberOfCols];
+        for (int j = 0; j < this.numberOfCols; j++) {
+          objArray[j] = String.valueOf(x++);
+        }
+        Row row = new Row(objArray);
+        rows.add(row);
+      }
+    }
+    @Override
+    public boolean hasNext() {
+      return index < numberOfRows;
+    }
+
+    @Override
+    public Row next() {
+      return rows.get(index++);
+    }
+
+    @Override
+    public void remove() {
+      throw new RuntimeException("Operation not supported.");
+    }
+
+    @Override
+    public String toString() {
+      return "RowIter{" +
+              "index=" + index +
+              ", rows=" + rows +
+              '}';
+    }
+  }
+
+  @Test
+  public void testCSVReader() throws IOException {
+    RowIter rowIter = new RowIter(10,10);
+
+    TableDataReader tableDataReader = new TableDataReader(rowIter);
+
+    char[] first10 = "0,1,2,3,4,".toCharArray();
+    char [] buf = new char[10];
+    tableDataReader.read(buf,0,10);
+
+//    System.out.println("first10 : " + Arrays.toString(first10));
+//    System.out.println("buf : " + Arrays.toString(buf));
+    Assert.assertArrayEquals(first10,buf);
+
+
+    char[] next11 = "5,6,7,8,9\n1".toCharArray();
+    char[] buf1 = new char[11];
+    tableDataReader.read(buf1, 0, 11);
+
+    Assert.assertArrayEquals(next11, buf1);
+
+    // read the rest of the stream fully
+    while (tableDataReader.read(buf, 0, 10) != -1);
+
+    char[] last10 = "97,98,99\n,".toCharArray(); // the trailing comma is left over from the previous read
+
+    Assert.assertArrayEquals(last10, buf);
+  }
+
+  @Test
+  public void testEmptyCSVReader() throws IOException {
+    RowIter rowIter = new RowIter(0,0);
+
+    TableDataReader tableDataReader = new TableDataReader(rowIter);
+
+    char[] first10 = new char[10]; // char arrays are zero-initialized in Java
+    char[] buf = new char[10];
+
+    tableDataReader.read(buf, 0, 10);
+
+    Assert.assertArrayEquals(first10, buf);
+  }
+}
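
Note that Reader.read(char[], int, int) is allowed to return fewer characters than requested; the assertions above lean on TableDataReader filling the buffer internally. A defensive caller would still drain the stream with the standard loop (a sketch over any java.io.Reader):

  static String drain(Reader reader) throws IOException {
    StringBuilder sb = new StringBuilder();
    char[] chunk = new char[1024];
    int n;
    while ((n = reader.read(chunk, 0, chunk.length)) != -1) {
      sb.append(chunk, 0, n); // append only the chars actually read
    }
    return sb.toString();
  }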

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/XMLParserTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/XMLParserTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/XMLParserTest.java
new file mode 100644
index 0000000..d048a62
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/XMLParserTest.java
@@ -0,0 +1,180 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.upload;
+
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.ambari.view.hive.resources.uploads.parsers.xml.XMLParser;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.Iterator;
+
+public class XMLParserTest {
+
+  @Test(expected = IOException.class)
+  public void testEmptyStream() throws IOException {
+    String xml = "";
+
+    StringReader sr = new StringReader(xml);
+
+    XMLParser jp = null;
+
+    try {
+
+      jp = new XMLParser(sr, null);
+
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+
+  @Test
+  public void testEmptyRow() throws IOException {
+    String xml = "<table><row></row></table>";
+
+    StringReader sr = new StringReader(xml);
+
+    XMLParser jp = null;
+
+    try {
+      jp = new XMLParser(sr, null);
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator should not be Empty", true, iterator.hasNext());
+      Assert.assertArrayEquals("Row should be empty",new Object[]{},iterator.next().getRow());
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+
+
+  @Test
+  public void testEmptyTable() throws IOException {
+    String xml = "<table></table>";
+    StringReader sr = new StringReader(xml);
+
+    XMLParser jp = null;
+
+    try {
+      jp = new XMLParser(sr, null);
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator Empty!", false, iterator.hasNext());
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+
+  @Test
+  public void testParse1Row() throws IOException {
+
+    String xml =
+    "<table>"
+    + "<row>"
+    + "<col name=\"key1\">value1</col>"
+    + "<col name=\"key2\">c</col>"
+    + "<col name=\"key3\">10</col>"
+    + "<col name=\"key4\">10.1</col>"
+    + "</row>"
+    + "</table>"  ;
+
+    StringReader sr = new StringReader(xml);
+
+    XMLParser jp = null;
+
+    try {
+      jp = new XMLParser(sr, null);
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator Empty!", true, iterator.hasNext());
+      Row row = iterator.next();
+      Row expected = new Row(new Object[]{"value1", "c", "10", "10.1"});
+      Assert.assertEquals("Row not equal!", expected, row);
+
+      Assert.assertEquals("Should report no more rows!", false, iterator.hasNext());
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+
+  @Test
+  public void testParseMultipleRow() throws IOException {
+    String xml =
+    "<table>"
+    + "<row>"
+    + "<col name=\"key1\">value1</col>"
+    + "<col name=\"key2\">c</col>"
+    + "<col name=\"key3\">10</col>"
+    + "<col name=\"key4\">10.1</col>"
+    + "</row>"
+    + "<row>"
+    + "<col name=\"key1\">value2</col>"
+    + "<col name=\"key2\">c2</col>"
+    + "<col name=\"key3\">102</col>"
+    + "<col name=\"key4\">true</col>"
+    + "</row>"
+    + "</table>"  ;
+
+
+
+    StringReader sr = new StringReader(xml);
+
+    XMLParser jp = null;
+
+    try {
+      jp = new XMLParser(sr, null);
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", new Row(new Object[]{"value1", "c", "10", "10.1"}), iterator.next());
+
+      Assert.assertEquals("Failed to detect 2nd row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 2nd row!", new Row(new Object[]{"value2", "c2", "102", Boolean.TRUE.toString()}), iterator.next());
+
+      Assert.assertEquals("Failed to detect end of rows!", false, iterator.hasNext());
+      Assert.assertEquals("Failed to detect end of rows 2nd time!", false, iterator.hasNext());
+    }finally{
+      if( null != jp )
+        jp.close();
+
+      sr.close();
+    }
+  }
+}
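
The expected document shape is a <table> element wrapping <row> elements whose <col name="..."> children become the row values. A minimal sketch of parsing such a document from a file rather than a string (the path rows.xml is illustrative):

  Reader reader = new InputStreamReader(new FileInputStream("rows.xml"), "UTF-8");
  XMLParser parser = new XMLParser(reader, null);
  try {
    for (Iterator<Row> it = parser.iterator(); it.hasNext(); ) {
      System.out.println(Arrays.toString(it.next().getRow()));
    }
  } finally {
    parser.close();
    reader.close();
  }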


[3/3] ambari git commit: AMBARI-15046. Hive view Upload table feature now supports upload from HDFS and Local. Supports JSON/XML/CSV as uploaded files and ORC and all other Hive internal storage types. (Nitiraj Rathore via pallavkul)

Posted by pa...@apache.org.
AMBARI-15046. Hive view Upload table feature now supports upload from HDFS and Local. Supports JSON/XML/CSV as uploaded files and ORC and all other Hive internal storage types. (Nitiraj Rathore via pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0747b6c7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0747b6c7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0747b6c7

Branch: refs/heads/branch-2.2
Commit: 0747b6c77020b9d42551fe0437968b7658148b37
Parents: fc8f637
Author: Pallav Kulshreshtha <pa...@gmail.com>
Authored: Fri Feb 26 15:28:24 2016 +0530
Committer: Pallav Kulshreshtha <pa...@gmail.com>
Committed: Fri Feb 26 15:28:24 2016 +0530

----------------------------------------------------------------------
 contrib/views/hive/pom.xml                      |   5 +
 .../org/apache/ambari/view/hive/client/Row.java |   4 +-
 .../view/hive/resources/jobs/JobService.java    |  17 +
 .../view/hive/resources/uploads/CSVParser.java  | 185 ------
 .../uploads/ColumnDescriptionImpl.java          |  55 +-
 .../view/hive/resources/uploads/DataParser.java |  63 --
 .../hive/resources/uploads/HiveFileType.java    |  30 +
 .../view/hive/resources/uploads/IParser.java    |  37 --
 .../hive/resources/uploads/ParseOptions.java    |  46 --
 .../view/hive/resources/uploads/ParseUtils.java | 103 ----
 .../hive/resources/uploads/QueryGenerator.java  |  66 --
 .../hive/resources/uploads/TableDataReader.java |  86 +++
 .../view/hive/resources/uploads/TableInfo.java  |  62 --
 .../view/hive/resources/uploads/TableInput.java |  90 +++
 .../resources/uploads/UploadFromHdfsInput.java  |  91 +++
 .../hive/resources/uploads/UploadService.java   | 462 ++++++++------
 .../resources/uploads/parsers/DataParser.java   |  72 +++
 .../uploads/parsers/EndOfDocumentException.java |  41 ++
 .../hive/resources/uploads/parsers/IParser.java |  48 ++
 .../resources/uploads/parsers/ParseOptions.java |  47 ++
 .../resources/uploads/parsers/ParseUtils.java   | 134 +++++
 .../hive/resources/uploads/parsers/Parser.java  | 154 +++++
 .../resources/uploads/parsers/PreviewData.java  |  56 ++
 .../resources/uploads/parsers/RowIterator.java  |  96 +++
 .../uploads/parsers/RowMapIterator.java         |  29 +
 .../uploads/parsers/csv/CSVIterator.java        |  57 ++
 .../uploads/parsers/csv/CSVParser.java          |  55 ++
 .../uploads/parsers/json/JSONIterator.java      | 160 +++++
 .../uploads/parsers/json/JSONParser.java        |  85 +++
 .../uploads/parsers/xml/XMLIterator.java        | 195 ++++++
 .../uploads/parsers/xml/XMLParser.java          | 100 ++++
 .../uploads/query/DeleteQueryInput.java         |  48 ++
 .../uploads/query/InsertFromQueryInput.java     |  68 +++
 .../resources/uploads/query/LoadQueryInput.java |  67 +++
 .../resources/uploads/query/QueryGenerator.java |  98 +++
 .../hive/resources/uploads/query/TableInfo.java |  83 +++
 .../ui/hive-web/app/adapters/upload-table.js    |  67 ++-
 .../ui/hive-web/app/components/input-header.js  |  61 ++
 .../ui/hive-web/app/components/radio-button.js  |  39 ++
 .../ui/hive-web/app/controllers/upload-table.js | 598 +++++++++++++++----
 .../ui/hive-web/app/initializers/i18n.js        |   1 +
 .../resources/ui/hive-web/app/services/job.js   |  15 +
 .../resources/ui/hive-web/app/styles/app.scss   |  59 +-
 .../app/templates/components/input-header.hbs   |  20 +
 .../ui/hive-web/app/templates/upload-table.hbs  | 202 +++++--
 .../hive/resources/upload/CSVParserTest.java    | 144 +++++
 .../resources/upload/DataParserCSVTest.java     | 214 +++++++
 .../resources/upload/DataParserJSONTest.java    | 209 +++++++
 .../hive/resources/upload/DataParserTest.java   |  65 --
 .../resources/upload/DataParserXMLTest.java     | 233 ++++++++
 .../hive/resources/upload/JsonParserTest.java   | 184 ++++++
 .../resources/upload/QueryGeneratorTest.java    |  84 +++
 .../resources/upload/TableDataReaderTest.java   | 127 ++++
 .../hive/resources/upload/XMLParserTest.java    | 180 ++++++
 54 files changed, 4598 insertions(+), 999 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hive/pom.xml b/contrib/views/hive/pom.xml
index 9f3f1f7..9ad4296 100644
--- a/contrib/views/hive/pom.xml
+++ b/contrib/views/hive/pom.xml
@@ -221,6 +221,11 @@
       <artifactId>httpcore</artifactId>
       <version>4.4.3</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-csv</artifactId>
+      <version>1.1</version>
+    </dependency>
   </dependencies>
 
   <properties>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Row.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Row.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Row.java
index 35f216b..cfce1f0 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Row.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Row.java
@@ -56,8 +56,8 @@ public class Row {
 
     Row row1 = (Row) o;
 
-    // Probably incorrect - comparing Object[] arrays with Arrays.equals
-    return Arrays.equals(row, row1.row);
+    boolean retValue = Arrays.equals(row, row1.row);
+    return retValue;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java
index fd69893..f7f883b 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java
@@ -259,6 +259,23 @@ public class JobService extends BaseService {
     }
   }
 
+
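+  /**
+   * Fetches the current status of a job.
+   * The response is a JSON object carrying the "jobStatus" and "jobId" keys.
+   */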
+  @Path("{jobId}/status")
+  @GET
+  @Consumes(MediaType.APPLICATION_JSON)
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response fetchJobStatus(@PathParam("jobId") String jobId) throws ItemNotFound, HiveClientException, NoOperationStatusSetException {
+    JobController jobController = getResourceManager().readController(jobId);
+    String jobStatus = jobController.getStatus().status;
+    LOG.info("jobStatus : {} for jobId : {}",jobStatus, jobId);
+
+    JSONObject jsonObject = new JSONObject();
+    jsonObject.put("jobStatus", jobStatus);
+    jsonObject.put("jobId", jobId);
+
+    return Response.ok(jsonObject).build();
+  }
+
   /**
    * Get next results page
    */
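
A hedged sketch of calling the new status endpoint with the standard JAX-RS 2.0 client; the base URL and job id are invented, and the real path depends on where the view instance is mounted:

  Client client = ClientBuilder.newClient();
  String body = client
      .target("http://ambari-host:8080/api/v1/views/HIVE/versions/1.0.0/instances/Hive/resources/jobs/42/status")
      .request(MediaType.APPLICATION_JSON)
      .get(String.class);
  // body is a JSON object such as {"jobStatus":"SUCCEEDED","jobId":"42"}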

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/CSVParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/CSVParser.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/CSVParser.java
deleted file mode 100644
index 388cf53..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/CSVParser.java
+++ /dev/null
@@ -1,185 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.hive.resources.uploads;
-
-import org.apache.ambari.view.hive.client.ColumnDescription;
-import org.apache.ambari.view.hive.client.ColumnDescriptionShort;
-import org.apache.ambari.view.hive.client.Row;
-import org.apache.commons.csv.CSVFormat;
-import org.apache.commons.csv.CSVRecord;
-
-import java.io.*;
-import java.util.*;
-
-/**
- * Parses the given Reader and extracts headers and rows, and detect datatypes of columns
- */
-public class CSVParser implements IParser {
-
-  static class CSVIterator implements Iterator<Row> {
-
-    private Iterator<CSVRecord> iterator;
-
-    public CSVIterator(Iterator<CSVRecord> iterator) {
-      this.iterator = iterator;
-    }
-
-    @Override
-    public boolean hasNext() {
-      return iterator.hasNext();
-    }
-
-    @Override
-    public Row next() {
-      CSVRecord row = iterator.next();
-      Object[] values = new Object[row.size()];
-      for (int i = 0; i < values.length; i++) {
-        values[i] = row.get(i);
-      }
-      Row r = new Row(values);
-      return r;
-    }
-
-    @Override
-    public void remove() {
-      this.iterator.remove();
-    }
-  }
-
-  private Reader originalReader; // same as CSV reader in this case
-  private ParseOptions parseOptions;
-  private CSVIterator iterator;
-  private List<Row> previewRows;
-  private List<ColumnDescription> header;
-  private boolean isHeaderFirstRow = false;
-  private int numberOfPreviewRows = 10;
-  private org.apache.commons.csv.CSVParser parser;
-
-  public CSVParser(Reader reader, ParseOptions parseOptions) throws IOException {
-    this.originalReader = reader;
-    this.parseOptions = parseOptions;
-    // always create without headers
-    parser = new org.apache.commons.csv.CSVParser(reader, CSVFormat.EXCEL);
-    iterator = new CSVIterator(parser.iterator());
-  }
-
-  public void parsePreview() {
-    try {
-      numberOfPreviewRows = (Integer) parseOptions.getOption(ParseOptions.OPTIONS_NUMBER_OF_PREVIEW_ROWS);
-    } catch (Exception e) {
-    }
-
-    int numberOfRows = numberOfPreviewRows;
-    previewRows = new ArrayList<Row>(numberOfPreviewRows); // size including the header.
-
-    Row headerRow = null;
-    if (parseOptions.getOption(ParseOptions.OPTIONS_HEADER).equals(ParseOptions.HEADER_FIRST_RECORD)) {
-      if (!this.iterator().hasNext()) {
-        throw new NoSuchElementException("Cannot parse Header");
-      }
-      isHeaderFirstRow = true;
-      headerRow = iterator().next();
-      previewRows.add(headerRow);
-    }
-
-    // find data types.
-    int[][] typeCounts = null;
-    Row r = null;
-    int numOfCols = 0;
-    if (iterator().hasNext()) {
-      r = iterator().next();
-      numOfCols = r.getRow().length;
-      typeCounts = new int[numOfCols][ColumnDescription.DataTypes.values().length];
-    } else {
-      throw new NoSuchElementException("No rows in the csv.");
-    }
-
-    while (true) {
-      // create Header definition from row
-      Object[] values = r.getRow();
-      previewRows.add(r);
-
-      if (values.length != numOfCols)
-        throw new IllegalArgumentException("Illegal number of cols for row : " + r);
-
-      for (int colNum = 0; colNum < values.length; colNum++) {
-        // detect type
-        ColumnDescription.DataTypes type = ParseUtils.detectHiveDataType(values[colNum]);
-        typeCounts[colNum][type.ordinal()]++;
-      }
-      numberOfRows--;
-      if (numberOfRows <= 0 || !iterator().hasNext())
-        break;
-
-      r = iterator().next();
-    }
-    ;
-
-    if (previewRows.size() <= 0)
-      throw new NoSuchElementException("Does not contain any rows.");
-
-    header = new ArrayList<ColumnDescription>(numOfCols);
-    for (int colNum = 0; colNum < numOfCols; colNum++) {
-      int dataTypeId = getLikelyDataType(typeCounts, colNum);
-      ColumnDescription.DataTypes type = ColumnDescription.DataTypes.values()[dataTypeId];
-      String colName = "Column" + colNum;
-      if (null != headerRow)
-        colName = (String) headerRow.getRow()[colNum];
-
-      ColumnDescription cd = new ColumnDescriptionImpl(colName, type.toString(), colNum);
-      header.add(cd);
-    }
-  }
-
-  /**
-   * returns which datatype was detected for the maximum number of times in the given column
-   * @param typeCounts
-   * @param colNum
-   * @return
-   */
-  private int getLikelyDataType(int[][] typeCounts, int colNum) {
-    int[] colArray = typeCounts[colNum];
-    int maxIndex = 0;
-    int i = 1;
-    for (; i < colArray.length; i++) {
-      if (colArray[i] > colArray[maxIndex])
-        maxIndex = i;
-    }
-
-    return maxIndex;
-  }
-
-  @Override
-  public Reader getCSVReader() {
-    return originalReader;
-  }
-
-  @Override
-  public List<ColumnDescription> getHeader() {
-    return header;
-  }
-
-  @Override
-  public List<Row> getPreviewRows() {
-    return this.previewRows;
-  }
-
-  public Iterator<Row> iterator() {
-    return iterator; // only one iterator per parser.
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/ColumnDescriptionImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/ColumnDescriptionImpl.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/ColumnDescriptionImpl.java
index 50f5036..229b7ed 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/ColumnDescriptionImpl.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/ColumnDescriptionImpl.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- *
+ * <p/>
  * http://www.apache.org/licenses/LICENSE-2.0
- *
+ * <p/>
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -22,10 +22,24 @@ import org.apache.ambari.view.hive.client.ColumnDescription;
 
 import java.io.Serializable;
 
+/**
+ * implementation of ColumnDescription which also includes scale and precision.
+ */
 public class ColumnDescriptionImpl implements ColumnDescription, Serializable {
   private String name;
   private String type;
   private int position;
+  /**
+   * can be null
+   */
+  private Integer precision;
+  /**
+   * can be null
+   */
+  private Integer scale;
+
+  public ColumnDescriptionImpl() {
+  }
 
   public ColumnDescriptionImpl(String name, String type, int position) {
     this.name = name;
@@ -33,6 +47,21 @@ public class ColumnDescriptionImpl implements ColumnDescription, Serializable {
     this.position = position;
   }
 
+  public ColumnDescriptionImpl(String name, String type, int position, int precision) {
+    this.name = name;
+    this.type = type;
+    this.position = position;
+    this.precision = precision;
+  }
+
+  public ColumnDescriptionImpl(String name, String type, int position, int precision, int scale) {
+    this.name = name;
+    this.type = type;
+    this.position = position;
+    this.precision = precision;
+    this.scale = scale;
+  }
+
   @Override
   public String getName() {
     return name;
@@ -63,6 +92,22 @@ public class ColumnDescriptionImpl implements ColumnDescription, Serializable {
     this.position = position;
   }
 
+  public Integer getPrecision() {
+    return precision;
+  }
+
+  public Integer getScale() {
+    return scale;
+  }
+
+  public void setPrecision(Integer precision) {
+    this.precision = precision;
+  }
+
+  public void setScale(Integer scale) {
+    this.scale = scale;
+  }
+
   @Override
   public boolean equals(Object o) {
     if (this == o) return true;
@@ -88,8 +133,10 @@ public class ColumnDescriptionImpl implements ColumnDescription, Serializable {
   public String toString() {
     return new StringBuilder().append("ColumnDescriptionImpl[")
             .append("name : ").append(name)
-            .append("type : " + type)
-            .append("position : " + position)
+            .append(", type : " + type)
+            .append(", position : " + position)
+            .append(", precision : " + precision)
+            .append(", scale : " + scale)
             .append("]").toString();
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/DataParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/DataParser.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/DataParser.java
deleted file mode 100644
index 5f2db55..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/DataParser.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.resources.uploads;
-
-import org.apache.ambari.view.hive.client.ColumnDescription;
-import org.apache.ambari.view.hive.client.Row;
-
-import java.io.IOException;
-import java.io.Reader;
-import java.util.Iterator;
-import java.util.List;
-
-public class DataParser implements IParser {
-
-  private IParser parser;
-
-  public DataParser(Reader reader, ParseOptions parseOptions) throws IOException {
-    if (parseOptions.getOption(ParseOptions.OPTIONS_FILE_TYPE).equals(ParseOptions.FILE_TYPE_CSV)) {
-      parser = new CSVParser(reader, parseOptions);
-    }
-  }
-
-  @Override
-  public Reader getCSVReader() {
-    return parser.getCSVReader();
-  }
-
-  @Override
-  public List<ColumnDescription> getHeader() {
-    return parser.getHeader();
-  }
-
-  @Override
-  public List<Row> getPreviewRows() {
-    return parser.getPreviewRows();
-  }
-
-  @Override
-  public void parsePreview() {
-    parser.parsePreview();
-  }
-
-  @Override
-  public Iterator<Row> iterator() {
-    return parser.iterator();
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/HiveFileType.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/HiveFileType.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/HiveFileType.java
new file mode 100644
index 0000000..6cc1d46
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/HiveFileType.java
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads;
+
+public enum HiveFileType {
+  SEQUENCEFILE,
+  TEXTFILE,
+  RCFILE,
+  ORC,
+  PARQUET,
+  AVRO,
+  INPUTFORMAT;
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/IParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/IParser.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/IParser.java
deleted file mode 100644
index c478b70..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/IParser.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.resources.uploads;
-
-import org.apache.ambari.view.hive.client.ColumnDescription;
-import org.apache.ambari.view.hive.client.Row;
-
-import java.io.File;
-import java.io.InputStream;
-import java.io.Reader;
-import java.util.List;
-
-public interface IParser extends Iterable<Row> {
-  public Reader getCSVReader();
-
-  public List<ColumnDescription> getHeader();
-
-  public List<Row> getPreviewRows();
-
-  public void parsePreview();
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/ParseOptions.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/ParseOptions.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/ParseOptions.java
deleted file mode 100644
index 2ec3b1b..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/ParseOptions.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.resources.uploads;
-
-import java.util.HashMap;
-
-public class ParseOptions {
-  final public static String OPTIONS_FILE_TYPE = "FILE_TYPE";
-  final public static String OPTIONS_HEADER = "HEADER";
-  final public static String OPTIONS_NUMBER_OF_PREVIEW_ROWS = "NUMBER_OF_PREVIEW_ROWS";
-
-  final public static String FILE_TYPE_CSV = "CSV";
-  final public static String FILE_TYPE_JSON = "JSON";
-  final public static String XML = "XML";
-
-  final public static String HEADER_FIRST_RECORD = "FIRST_RECORD";
-  final public static String HEADER_PROVIDED_BY_USER = "PROVIDED_BY_USER";
-
-  final public static String HEADERS = "HEADERS";
-
-  private HashMap<String, Object> options = new HashMap<String, Object>();
-
-  public void setOption(String key, Object value) {
-    this.options.put(key, value);
-  }
-
-  public Object getOption(String key) {
-    return this.options.get(key);
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/ParseUtils.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/ParseUtils.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/ParseUtils.java
deleted file mode 100644
index aea370e..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/ParseUtils.java
+++ /dev/null
@@ -1,103 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.resources.uploads;
-
-import org.apache.ambari.view.hive.client.ColumnDescription;
-
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
-public class ParseUtils {
-
-  final public static String[] DATE_FORMATS = {"mm/dd/yyyy", "dd/mm/yyyy", "mm-dd-yyyy" /*add more formatss*/};
-
-  public static boolean isInteger(Object object) {
-    if (object == null)
-      return false;
-
-    if (object instanceof Integer)
-      return true;
-
-    try {
-      Integer i = Integer.parseInt(object.toString());
-      return true;
-    } catch (NumberFormatException nfe) {
-      return false;
-    }
-  }
-
-  public static boolean isDouble(Object object) {
-    if (object == null)
-      return false;
-
-    if (object instanceof Double)
-      return true;
-
-    try {
-      Double i = Double.parseDouble(object.toString());
-      return true;
-    } catch (NumberFormatException nfe) {
-      return false;
-    }
-  }
-
-  public static boolean isChar(Object object) {
-    if (object == null)
-      return false;
-
-    if (object instanceof Character)
-      return true;
-
-    String str = object.toString().trim();
-    if (str.length() == 1)
-      return true;
-
-    return false;
-  }
-
-  public static boolean isDate(Object object) {
-    if (object == null)
-      return false;
-
-    if (object instanceof Date)
-      return true;
-
-    String str = object.toString();
-    for (String format : DATE_FORMATS) {
-      try {
-        Date i = new SimpleDateFormat(format).parse(str);
-        return true;
-      } catch (Exception e) {
-      }
-    }
-
-    return false;
-  }
-
-  public static ColumnDescription.DataTypes detectHiveDataType(Object object) {
-    // detect Integer
-    if (isInteger(object)) return ColumnDescription.DataTypes.INT;
-    if (isDouble(object)) return ColumnDescription.DataTypes.DOUBLE;
-    if (isDate(object)) return ColumnDescription.DataTypes.DATE;
-    if (isChar(object)) return ColumnDescription.DataTypes.CHAR;
-
-    return ColumnDescription.DataTypes.STRING;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/QueryGenerator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/QueryGenerator.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/QueryGenerator.java
deleted file mode 100644
index 98616cf..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/QueryGenerator.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.resources.uploads;
-
-import org.apache.ambari.view.hive.client.ColumnDescription;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.List;
-
-/**
- * generates the sql query from given data
- */
-public class QueryGenerator {
-  protected final static Logger LOG =
-          LoggerFactory.getLogger(QueryGenerator.class);
-
-  public String generateCreateQuery(TableInfo tableInfo) {
-    String tableName = tableInfo.getTableName();
-    List<ColumnDescription> cdList = tableInfo.getColumns();
-
-    StringBuilder query = new StringBuilder();
-    query.append("create table " + tableName + " (");
-    Collections.sort(cdList, new Comparator<ColumnDescription>() {
-      @Override
-      public int compare(ColumnDescription o1, ColumnDescription o2) {
-        return o1.getPosition() - o2.getPosition();
-      }
-    });
-
-    boolean first = true;
-    for (ColumnDescription cd : cdList) {
-      if (first) {
-        first = false;
-      } else {
-        query.append(", ");
-      }
-
-      query.append(cd.getName() + " " + cd.getType());
-    }
-
-    query.append(") ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE;");
-
-    String queryString = query.toString();
-    LOG.info("Query : %S", queryString);
-    return queryString;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/TableDataReader.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/TableDataReader.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/TableDataReader.java
new file mode 100644
index 0000000..e9bdb92
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/TableDataReader.java
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads;
+
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.commons.csv.CSVFormat;
+import org.apache.commons.csv.CSVPrinter;
+
+import java.io.IOException;
+import java.io.Reader;
+import java.io.StringReader;
+import java.io.StringWriter;
+import java.util.Iterator;
+
+/**
+ * Takes a row iterator as input, iterates over the rows and produces a
+ * CSV-formatted character stream with records separated by the newline "\n".
+ * Note: column values must not contain "\n".
+ */
+public class TableDataReader extends Reader {
+
+  private static final int CAPACITY = 1024;
+  private StringReader stringReader = new StringReader("");
+
+  private Iterator<Row> iterator;
+  private static final CSVFormat CSV_FORMAT = CSVFormat.DEFAULT.withRecordSeparator("\n");
+
+  public TableDataReader(Iterator<Row> rowIterator) {
+    this.iterator = rowIterator;
+  }
+
+  @Override
+  public int read(char[] cbuf, int off, int len) throws IOException {
+
+    int totalLen = len;
+    int count = 0;
+    do {
+      int n = stringReader.read(cbuf, off, len);
+
+      if (n != -1) {
+        // n chars were read in this pass
+        len = len - n; // len more to be read
+        off = off + n; // off now shifted to n more
+        count += n;
+      }
+
+      if (count == totalLen) return count; // all totalLen characters were read
+
+      if (iterator.hasNext()) { // keep reading as long as we keep getting rows
+        StringWriter stringWriter = new StringWriter(CAPACITY);
+        CSVPrinter csvPrinter = new CSVPrinter(stringWriter, CSV_FORMAT);
+        Row row = iterator.next();
+        csvPrinter.printRecord(row.getRow());
+        stringReader.close(); // close the old string reader
+        stringReader = new StringReader(stringWriter.getBuffer().toString());
+        csvPrinter.close();
+        stringWriter.close();
+      } else {
+        return count == 0 ? -1 : count;
+      }
+    } while (count < totalLen);
+
+    return count;
+  }
+
+  @Override
+  public void close() throws IOException {
+    // no-op: each per-row StringReader is closed as it is replaced in read()
+  }
+}
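
A minimal sketch of wiring this reader to any parser that yields Row objects (the names rows and out are placeholders):

  static void copyAsCsv(Iterator<Row> rows, Writer out) throws IOException {
    TableDataReader csv = new TableDataReader(rows);
    char[] buf = new char[1024];
    int n;
    while ((n = csv.read(buf, 0, buf.length)) != -1) {
      out.write(buf, 0, n); // forward each CSV chunk to its destination
    }
  }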

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/TableInfo.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/TableInfo.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/TableInfo.java
deleted file mode 100644
index ed4943d..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/TableInfo.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.resources.uploads;
-
-import org.apache.ambari.view.hive.client.ColumnDescription;
-
-import java.util.List;
-
-public class TableInfo {
-  private String tableName;
-  private String databaseName;
-  private List<ColumnDescription> columns;
-
-  public String getTableName() {
-    return tableName;
-  }
-
-  public void setTableName(String tableName) {
-    this.tableName = tableName;
-  }
-
-  public String getDatabaseName() {
-    return databaseName;
-  }
-
-  public void setDatabaseName(String databaseName) {
-    this.databaseName = databaseName;
-  }
-
-  public List<ColumnDescription> getColumns() {
-    return columns;
-  }
-
-  public void setColumns(List<ColumnDescription> columns) {
-    this.columns = columns;
-  }
-
-  public TableInfo(String databaseName, String tableName, List<ColumnDescription> columns) {
-    this.tableName = tableName;
-    this.databaseName = databaseName;
-    this.columns = columns;
-  }
-
-  public TableInfo() {
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/TableInput.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/TableInput.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/TableInput.java
new file mode 100644
index 0000000..4a50e93
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/TableInput.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads;
+
+import java.util.List;
+
+/**
+ * Used as the input of the REST call that creates a table.
+ */
+class TableInput {
+  public Boolean isFirstRowHeader;
+  public List<ColumnDescriptionImpl> header;
+  public String tableName;
+  public String databaseName;
+  /**
+   * the storage format of the table created inside Hive: ORC, TEXTFILE, etc.
+   */
+  public String fileType;
+  /**
+   * the format of the file uploaded. CSV, JSON, XML etc.
+   */
+  public String fileFormat;
+
+  public TableInput() {
+  }
+
+  public Boolean getIsFirstRowHeader() {
+    return isFirstRowHeader;
+  }
+
+  public void setIsFirstRowHeader(Boolean isFirstRowHeader) {
+    this.isFirstRowHeader = isFirstRowHeader;
+  }
+
+  public List<ColumnDescriptionImpl> getHeader() {
+    return header;
+  }
+
+  public void setHeader(List<ColumnDescriptionImpl> header) {
+    this.header = header;
+  }
+
+  public String getTableName() {
+    return tableName;
+  }
+
+  public void setTableName(String tableName) {
+    this.tableName = tableName;
+  }
+
+  public String getDatabaseName() {
+    return databaseName;
+  }
+
+  public void setDatabaseName(String databaseName) {
+    this.databaseName = databaseName;
+  }
+
+  public String getFileType() {
+    return fileType;
+  }
+
+  public void setFileType(String fileType) {
+    this.fileType = fileType;
+  }
+
+  public String getFileFormat() {
+    return fileFormat;
+  }
+
+  public void setFileFormat(String fileFormat) {
+    this.fileFormat = fileFormat;
+  }
+}
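
A short sketch of populating this bean the way a deserialized JSON request body would; every value is invented for illustration:

  TableInput input = new TableInput();
  input.setDatabaseName("default");
  input.setTableName("users");
  input.setFileType("ORC");       // storage format of the table created in Hive
  input.setFileFormat("CSV");     // format of the uploaded file
  input.setIsFirstRowHeader(Boolean.TRUE);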

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadFromHdfsInput.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadFromHdfsInput.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadFromHdfsInput.java
new file mode 100644
index 0000000..9b052ab
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadFromHdfsInput.java
@@ -0,0 +1,91 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads;
+
+import java.io.Serializable;
+
+public class UploadFromHdfsInput implements Serializable{
+  private Boolean isFirstRowHeader;
+  private String inputFileType;
+  private String hdfsPath;
+  private String tableName;
+  private String databaseName;
+
+  public UploadFromHdfsInput() {
+  }
+
+  public UploadFromHdfsInput(Boolean isFirstRowHeader, String inputFileType, String hdfsPath, String tableName, String databaseName) {
+    this.isFirstRowHeader = isFirstRowHeader;
+    this.inputFileType = inputFileType;
+    this.hdfsPath = hdfsPath;
+    this.tableName = tableName;
+    this.databaseName = databaseName;
+  }
+
+  public Boolean getIsFirstRowHeader() {
+    return isFirstRowHeader;
+  }
+
+  public void setIsFirstRowHeader(Boolean firstRowHeader) {
+    isFirstRowHeader = firstRowHeader;
+  }
+
+  public String getInputFileType() {
+    return inputFileType;
+  }
+
+  public void setInputFileType(String inputFileType) {
+    this.inputFileType = inputFileType;
+  }
+
+  public String getHdfsPath() {
+    return hdfsPath;
+  }
+
+  public void setHdfsPath(String hdfsPath) {
+    this.hdfsPath = hdfsPath;
+  }
+
+  public String getTableName() {
+    return tableName;
+  }
+
+  public void setTableName(String tableName) {
+    this.tableName = tableName;
+  }
+
+  public String getDatabaseName() {
+    return databaseName;
+  }
+
+  public void setDatabaseName(String databaseName) {
+    this.databaseName = databaseName;
+  }
+
+  @Override
+  public String toString() {
+    return "UploadFromHdfsInput{" +
+            "isFirstRowHeader=" + isFirstRowHeader +
+            ", inputFileType='" + inputFileType + '\'' +
+            ", hdfsPath='" + hdfsPath + '\'' +
+            ", tableName='" + tableName + '\'' +
+            ", databaseName='" + databaseName + '\'' +
+            '}';
+  }
+}
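
A minimal usage sketch, assuming a CSV file already present on HDFS (path and names are illustrative):

  UploadFromHdfsInput input = new UploadFromHdfsInput(
          Boolean.TRUE,                        // first row is the header
          "CSV",                               // a ParseOptions.InputFileType value
          "/user/admin/data/employees.csv",    // hypothetical HDFS path
          "employees",                         // target table
          "default");                          // target database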

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadService.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadService.java
index 8b5b851..6935ec5 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadService.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadService.java
@@ -21,40 +21,44 @@ package org.apache.ambari.view.hive.resources.uploads;
 import com.sun.jersey.core.header.FormDataContentDisposition;
 import com.sun.jersey.multipart.FormDataParam;
 import org.apache.ambari.view.hive.BaseService;
-import org.apache.ambari.view.hive.client.ColumnDescription;
-import org.apache.ambari.view.hive.client.HiveClientException;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.hive.resources.jobs.NoOperationStatusSetException;
 import org.apache.ambari.view.hive.resources.jobs.viewJobs.Job;
 import org.apache.ambari.view.hive.resources.jobs.viewJobs.JobController;
 import org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl;
 import org.apache.ambari.view.hive.resources.jobs.viewJobs.JobResourceManager;
+import org.apache.ambari.view.hive.resources.uploads.parsers.DataParser;
+import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive.resources.uploads.parsers.PreviewData;
+import org.apache.ambari.view.hive.resources.uploads.query.*;
 import org.apache.ambari.view.hive.utils.ServiceFormattedException;
 import org.apache.ambari.view.hive.utils.SharedObjectsFactory;
 import org.apache.ambari.view.utils.ambari.AmbariApi;
+import org.apache.commons.io.FilenameUtils;
 import org.apache.commons.io.input.ReaderInputStream;
+import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
-import org.json.simple.JSONArray;
 import org.json.simple.JSONObject;
-import org.json.simple.JSONValue;
 
 import javax.ws.rs.*;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 import java.io.*;
 import java.lang.reflect.InvocationTargetException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 
 /**
- * Servlet for queries
+ * UI-driven end points for creating a new hive table and inserting data into it.
+ * The flow: upload a file, partially parse it based on its type, generate a preview,
+ * create a temporary hive table (stored as CSV) and the actual hive table,
+ * upload the file again, parse it into a CSV stream and upload that to the temporary table on hdfs,
+ * insert rows from the temporary table into the actual table, then delete the temporary table.
+ * <p/>
  * API:
- * POST /preview
- * POST /upload
- * POST /createTable
- * GET /createTable/status
+ * POST /preview : takes a stream, parses it and returns preview rows, headers and column type suggestions
+ * POST /createTable : runs a hive query to create the table in hive
+ * POST /upload : takes a stream, parses it, converts it to CSV and uploads it to the temporary table
+ * POST /insertIntoTable : runs a hive query to insert data from the temporary table into the actual hive table
+ * POST /deleteTable : drops the temporary table
  */
 public class UploadService extends BaseService {
 
@@ -62,107 +66,255 @@ public class UploadService extends BaseService {
 
   protected JobResourceManager resourceManager;
 
-  final private String HIVE_META_STORE_LOCATION_KEY = "hive.metastore.warehouse.dir";
-  final private String HIVE_SITE = "hive-site";
-  final private String HIVE_DEFAULT_DB = "default";
+  final private static String HIVE_META_STORE_LOCATION_KEY = "hive.metastore.warehouse.dir";
+  final private static String HIVE_SITE = "hive-site";
+  final private static String HIVE_DEFAULT_DB = "default";
+
+  @POST
+  @Path("/previewFromHdfs")
+  @Consumes(MediaType.APPLICATION_JSON)
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response uploadForPreviewFromHDFS(UploadFromHdfsInput input) {
+
+    InputStream uploadedInputStream = null;
+    try {
+      uploadedInputStream = getHDFSFileStream(input.getHdfsPath());
+      PreviewData pd = generatePreview(input.getIsFirstRowHeader(), input.getInputFileType(), uploadedInputStream);
+      String tableName = getBasenameFromPath(input.getHdfsPath());
+      return createPreviewResponse(pd, input.getIsFirstRowHeader(), tableName);
+    } catch (Exception e) {
+      LOG.error("Exception occurred while generating preview for hdfs file : " + input.getHdfsPath(), e);
+      throw new ServiceFormattedException(e.getMessage(), e);
+    } finally {
+      if (null != uploadedInputStream) {
+        try {
+          uploadedInputStream.close();
+        } catch (IOException e) {
+          LOG.error("Exception occured while closing the HDFS file stream for path " + input.getHdfsPath(), e);
+        }
+      }
+    }
+  }
 
   @POST
   @Path("/preview")
   @Consumes(MediaType.MULTIPART_FORM_DATA)
   public Response uploadForPreview(
           @FormDataParam("file") InputStream uploadedInputStream,
-          @FormDataParam("file") FormDataContentDisposition fileDetail) {
+          @FormDataParam("file") FormDataContentDisposition fileDetail,
+          @FormDataParam("isFirstRowHeader") Boolean isFirstRowHeader,
+          @FormDataParam("inputFileType") String inputFileType
+  ) {
+    try {
+      PreviewData pd = generatePreview(isFirstRowHeader, inputFileType, uploadedInputStream);
+      return createPreviewResponse(pd, isFirstRowHeader, getBasename(fileDetail.getFileName()));
+    } catch (Exception e) {
+      LOG.error("Exception occurred while generating preview for local file", e);
+      throw new ServiceFormattedException(e.getMessage(), e);
+    }
+  }
 
-    ParseOptions parseOptions = new ParseOptions();
-    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.FILE_TYPE_CSV);
-    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER_FIRST_RECORD);
 
+  @Path("/createTable")
+  @POST
+  @Consumes(MediaType.APPLICATION_JSON)
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response createTable(TableInput tableInput) {
     try {
-      DataParser dataParser = new DataParser(new InputStreamReader(uploadedInputStream), parseOptions);
+      List<ColumnDescriptionImpl> header = tableInput.getHeader();
+      String databaseName = tableInput.getDatabaseName();
+      String tableName = tableInput.getTableName();
+      Boolean isFirstRowHeader = tableInput.getIsFirstRowHeader();
+      String fileTypeStr = tableInput.getFileType();
+      HiveFileType hiveFileType = HiveFileType.valueOf(fileTypeStr);
+
 
-      dataParser.parsePreview();
+      TableInfo ti = new TableInfo(databaseName, tableName, header, hiveFileType);
+      String tableCreationQuery = generateCreateQuery(ti);
 
-      Map<String, Object> retData = new HashMap<String, Object>();
-      retData.put("header", dataParser.getHeader());
-      retData.put("rows", dataParser.getPreviewRows());
-      retData.put("isFirstRowHeader", true);
+      LOG.info("tableCreationQuery : {}", tableCreationQuery);
 
-      JSONObject jsonObject = new JSONObject(retData);
-      return Response.ok(jsonObject).build();
-    } catch (IOException e) {
+      Job actualTableJob = createJob(tableCreationQuery, databaseName);
+      String actualTableJobId = actualTableJob.getId();
+
+      JSONObject jobObject = new JSONObject();
+      jobObject.put("jobId", actualTableJobId);
+
+      LOG.info("table creation jobId {}", actualTableJobId);
+      return Response.ok(jobObject).status(201).build();
+    } catch (Exception e) {
+      LOG.error("Exception occurred while creating table with input : " + tableInput, e);
       throw new ServiceFormattedException(e.getMessage(), e);
     }
   }
 
-  public static class TableInput {
-    public Boolean isFirstRowHeader;
-    public String header;
-    public String tableName;
-    public String databaseName;
+  @Path("/uploadFromHDFS")
+  @POST
+  @Consumes(MediaType.APPLICATION_JSON)
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response uploadFileFromHdfs(UploadFromHdfsInput input ) {
+    if (ParseOptions.InputFileType.CSV.toString().equals(input.getInputFileType()) && input.getIsFirstRowHeader().equals(Boolean.FALSE)) {
+      // upload using the LOAD query
+      LoadQueryInput loadQueryInput = new LoadQueryInput(input.getHdfsPath(), input.getDatabaseName(), input.getTableName());
+      String loadQuery = new QueryGenerator().generateLoadQuery(loadQueryInput);
+
+      try {
+        Job job = createJob(loadQuery,  input.getDatabaseName());
+
+        JSONObject jo = new JSONObject();
+        jo.put("jobId", job.getId());
+
+        return Response.ok(jo).build();
+      } catch (Exception e) {
+        LOG.error("Exception occurred while creating job for Load From HDFS query : " + loadQuery, e);
+        throw new ServiceFormattedException(e.getMessage(), e);
+      }
 
-    public TableInput() {
+    } else {
+      // create stream and upload
+      InputStream hdfsStream = null;
+      try {
+        hdfsStream = getHDFSFileStream(input.getHdfsPath());
+        String path = uploadFileFromStream(hdfsStream, input.getIsFirstRowHeader(), input.getInputFileType(), input.getTableName(), input.getDatabaseName());
+
+        JSONObject jo = new JSONObject();
+        jo.put("uploadedPath", path);
+
+        return Response.ok(jo).build();
+      } catch (Exception e) {
+        LOG.error("Exception occurred while uploading the file from HDFS with path : " + input.getHdfsPath(), e);
+        throw new ServiceFormattedException(e.getMessage(), e);
+      } finally {
+        if (null != hdfsStream)
+          try {
+            hdfsStream.close();
+          } catch (IOException e) {
+            LOG.error("Exception occured while closing the HDFS stream for path : " + input.getHdfsPath(), e);
+          }
+      }
     }
+  }
 
-    public Boolean getIsFirstRowHeader() {
-      return isFirstRowHeader;
-    }
+  @Path("/upload")
+  @POST
+  @Consumes(MediaType.MULTIPART_FORM_DATA)
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response uploadFile(
+          @FormDataParam("file") InputStream uploadedInputStream,
+          @FormDataParam("file") FormDataContentDisposition fileDetail,
+          @FormDataParam("isFirstRowHeader") Boolean isFirstRowHeader,
+          @FormDataParam("inputFileType") String inputFileType,   // the format of the file uploaded. CSV/JSON etc.
+          @FormDataParam("tableName") String tableName,
+          @FormDataParam("databaseName") String databaseName
+  ) {
+    try {
 
-    public void setIsFirstRowHeader(Boolean isFirstRowHeader) {
-      this.isFirstRowHeader = isFirstRowHeader;
-    }
+      String path = uploadFileFromStream(uploadedInputStream, isFirstRowHeader, inputFileType, tableName, databaseName);
 
-    public String getHeader() {
-      return header;
+      JSONObject jo = new JSONObject();
+      jo.put("uploadedPath", path);
+      return Response.ok(jo).build();
+    } catch (Exception e) {
+      throw new ServiceFormattedException(e.getMessage(), e);
     }
+  }
 
-    public void setHeader(String header) {
-      this.header = header;
-    }
+  @Path("/insertIntoTable")
+  @POST
+  @Consumes(MediaType.APPLICATION_JSON)
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response insertFromTempTable(InsertFromQueryInput input) {
+    try {
+      String insertQuery = generateInsertFromQuery(input);
+      LOG.info("insertQuery : {}", insertQuery);
+
+      Job job = createJob(insertQuery, "default");
 
-    public String getTableName() {
-      return tableName;
+      JSONObject jo = new JSONObject();
+      jo.put("jobId", job.getId());
+
+      return Response.ok(jo).build();
+    } catch (Exception e) {
+      throw new ServiceFormattedException(e.getMessage(), e);
     }
+  }
 
-    public void setTableName(String tableName) {
-      this.tableName = tableName;
+  @Path("/deleteTable")
+  @POST
+  @Consumes(MediaType.APPLICATION_JSON)
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response deleteTable(DeleteQueryInput input) {
+    try {
+      String deleteQuery = generateDeleteQuery(input);
+      LOG.info("deleteQuery : {}", deleteQuery);
+
+      Job job = createJob(deleteQuery, "default");
+
+      JSONObject jo = new JSONObject();
+      jo.put("jobId", job.getId());
+
+      return Response.ok(jo).build();
+    } catch (Exception e) {
+      throw new ServiceFormattedException(e.getMessage(), e);
     }
+  }
 
-    public String getDatabaseName() {
-      return databaseName;
+  private String uploadIntoTable(Reader reader, String databaseName, String tempTableName) {
+    try {
+      String basePath = getHiveMetaStoreLocation();
+      if (null == basePath)
+        basePath = "/apps/hive/warehouse";
+
+      if (!basePath.endsWith("/"))
+        basePath = basePath + "/";
+
+      if (databaseName != null && !databaseName.equals(HIVE_DEFAULT_DB)) {
+        basePath = basePath + databaseName + ".db/";
+      }
+
+      String fullPath = basePath + tempTableName + "/" + tempTableName + ".csv";
+
+      LOG.info("Uploading file into : {}", fullPath);
+
+      uploadFile(fullPath, new ReaderInputStream(reader));
+
+      return fullPath;
+    } catch (Exception e) {
+      throw new ServiceFormattedException(e.getMessage(), e);
     }
+  }
 
-    public void setDatabaseName(String databaseName) {
-      this.databaseName = databaseName;
+  private synchronized JobResourceManager getResourceManager() {
+    if (resourceManager == null) {
+      SharedObjectsFactory connectionsFactory = getSharedObjectsFactory();
+      resourceManager = new JobResourceManager(connectionsFactory, context);
     }
+    return resourceManager;
   }
 
-  @Path("/createTable")
-  @POST
-  @Consumes(MediaType.APPLICATION_JSON)
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response createTable(TableInput tableInput) throws IllegalAccessException, InvocationTargetException, ItemNotFound, NoSuchMethodException {
-    String header = tableInput.getHeader();
-    String databaseName = tableInput.getDatabaseName();
-    String tableName = tableInput.getTableName();
-    Boolean isFirstRowHeader = (Boolean) tableInput.getIsFirstRowHeader();
-
-    Object headerObj = JSONValue.parse(header);
-    JSONArray array = (JSONArray) headerObj;
-    List<ColumnDescription> cdList = new ArrayList<ColumnDescription>(array.size());
-    for (Object o : array) {
-      JSONObject jo = (JSONObject) o;
-      String name = (String) jo.get("name");
-      String type = (String) jo.get("type");
-      Long p = (Long) jo.get("position");
-      Integer position = p != null ? p.intValue() : 0;
-
-      ColumnDescriptionImpl cdi = new ColumnDescriptionImpl(name, type, position);
-      cdList.add(cdi);
+  private synchronized AmbariApi getAmbariApi() {
+    if (null == ambariApi) {
+      ambariApi = new AmbariApi(this.context);
     }
+    return ambariApi;
+  }
+
+  private String generateCreateQuery(TableInfo ti) {
+    return new QueryGenerator().generateCreateQuery(ti);
+  }
+
+  private String generateInsertFromQuery(InsertFromQueryInput input) {
+    return new QueryGenerator().generateInsertFromQuery(input);
+  }
+
+  private String generateDeleteQuery(DeleteQueryInput deleteQueryInput) {
+    return new QueryGenerator().generateDropTableQuery(deleteQueryInput);
+  }
 
-    Map jobInfo = new HashMap<String, String>();//PropertyUtils.describe(request.job);
+  private Job createJob(String query, String databaseName) throws InvocationTargetException, IllegalAccessException, ItemNotFound {
+    Map<String, String> jobInfo = new HashMap<String, String>();
     jobInfo.put("title", "Internal Table Creation");
-    jobInfo.put("forcedContent", generateCreateQuery(databaseName, tableName, cdList));
+    jobInfo.put("forcedContent", query);
     jobInfo.put("dataBase", databaseName);
 
     LOG.info("jobInfo : " + jobInfo);
@@ -174,124 +326,90 @@ public class UploadService extends BaseService {
     createdJobController.submit();
     getResourceManager().saveIfModified(createdJobController);
 
-    String filePath = (databaseName == null || databaseName.equals(HIVE_DEFAULT_DB)) ? "" : databaseName + ".db/";
-    filePath += tableName + "/" + tableName + ".csv";
-
-    JSONObject jobObject = new JSONObject();
-    jobObject.put("jobId", job.getId());
-    jobObject.put("filePath", filePath);
-
-    LOG.info("Create table query submitted : file should be uploaded at location : {}", filePath);
-    return Response.ok(jobObject).status(201).build();
+    return job;
   }
 
-  @Path("/createTable/status")
-  @GET
-  @Consumes(MediaType.APPLICATION_JSON)
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response isTableCreated(@QueryParam("jobId") int jobId) throws IllegalAccessException, NoSuchMethodException, InvocationTargetException, ItemNotFound, HiveClientException, NoOperationStatusSetException {
-    JobController jobController = getResourceManager().readController(jobId + "");
-    LOG.info("jobController.getStatus().status : {} for job : {}", jobController.getStatus().status, jobController.getJob().getId());
-    JSONObject jsonObject = new JSONObject();
-    jsonObject.put("status", jobController.getStatus().status);
-    return Response.ok(jsonObject).build();
+  private String getHiveMetaStoreLocation() {
+    return this.getAmbariApi().getCluster().getConfigurationValue(HIVE_SITE, HIVE_META_STORE_LOCATION_KEY);
   }
 
-  @Path("/upload")
-  @POST
-  @Consumes(MediaType.MULTIPART_FORM_DATA)
-  public Response uploadFile(
-          @FormDataParam("file") InputStream uploadedInputStream,
-          @FormDataParam("file") FormDataContentDisposition fileDetail,
-          @FormDataParam("isFirstRowHeader") Boolean isFirstRowHeader,
-          @FormDataParam("filePath") String filePath
+  private void uploadFile(final String filePath, InputStream uploadedInputStream)
+          throws IOException, InterruptedException {
+    byte[] chunk = new byte[1024];
+    FSDataOutputStream out = getSharedObjectsFactory().getHdfsApi().create(filePath, false);
+    int n = -1;
+    while ((n = uploadedInputStream.read(chunk)) != -1) {
+      out.write(chunk, 0, n);
+    }
+    out.close();
+  }
 
-  ) throws IOException, InterruptedException {
-    LOG.info("inside uploadFile : isFirstRowHeader : {} , filePath : {}", isFirstRowHeader, filePath);
-/*  This is not working as expected.
+  private PreviewData generatePreview(Boolean isFirstRowHeader, String inputFileType, InputStream uploadedInputStream) throws IOException {
     ParseOptions parseOptions = new ParseOptions();
-    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.FILE_TYPE_CSV);
-    parseOptions.setOption(ParseOptions.HEADERS,cdList);
-
-    if(isFirstRowHeader)
-      parseOptions.setOption(ParseOptions.OPTIONS_HEADER,ParseOptions.HEADER_FIRST_RECORD);
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, inputFileType);
+    if (inputFileType.equals(ParseOptions.InputFileType.CSV.toString()) && !isFirstRowHeader)
+      parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.PROVIDED_BY_USER.toString());
     else
-      parseOptions.setOption(ParseOptions.OPTIONS_HEADER,ParseOptions.HEADER_PROVIDED_BY_USER);
-
-    DataParser dataParser = new DataParser(new InputStreamReader(uploadedInputStream),parseOptions);
-
-    // remove first row if it is header and send the rest to HDFS
-    if(isFirstRowHeader){
-      if( dataParser.iterator().hasNext() ){
-        dataParser.iterator().next();
-      }
-    }
+      parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
 
-    Reader csvReader = dataParser.getCSVReader();
-*/
+    LOG.info("isFirstRowHeader : {}, inputFileType : {}", isFirstRowHeader, inputFileType);
 
-    // TODO : workaround alert as above method is not working properly
-    // remove first row if it is header and send the rest to HDFS
-    Reader r = new InputStreamReader(uploadedInputStream);
-    if (isFirstRowHeader) {
-      BufferedReader br = new BufferedReader(r, 1); //
-      br.readLine(); // TODO : remove the header line. Wrong if first record is beyond first endline
-    }
+    DataParser dataParser = new DataParser(new InputStreamReader(uploadedInputStream), parseOptions);
 
-    String basePath = getHiveMetaStoreLocation();
-    if (null == basePath)
-      basePath = "/apps/hive/warehouse";
+    return dataParser.parsePreview();
 
-    if (!basePath.endsWith("/"))
-      basePath = basePath + "/";
+  }
 
-    String fullPath = basePath + filePath;
+  private Response createPreviewResponse(PreviewData pd, Boolean isFirstRowHeader, String tableName) {
 
-    uploadTable(new ReaderInputStream(r), fullPath);
+    Map<String, Object> retData = new HashMap<String, Object>();
+    retData.put("header", pd.getHeader());
+    retData.put("rows", pd.getPreviewRows());
+    retData.put("isFirstRowHeader", isFirstRowHeader);
+    retData.put("tableName", tableName);
 
-    LOG.info("create the table successfully at : {}", fullPath);
-    return Response.ok().build();
+    JSONObject jsonObject = new JSONObject(retData);
+    return Response.ok(jsonObject).build();
   }
 
-  private String getHiveMetaStoreLocation() {
-    return this.getAmbariApi().getCluster().getConfigurationValue(HIVE_SITE, HIVE_META_STORE_LOCATION_KEY);
+  private InputStream getHDFSFileStream(String path) throws IOException, InterruptedException {
+    FSDataInputStream fsStream = getSharedObjectsFactory().getHdfsApi().open(path);
+    return fsStream;
   }
 
-  private void uploadTable(InputStream is, String path) throws IOException, InterruptedException {
-    if (!path.endsWith("/")) {
-      path = path + "/";
-    }
+  private String uploadFileFromStream(
+          InputStream uploadedInputStream,
+          Boolean isFirstRowHeader,
+          String inputFileType,   // the format of the file uploaded. CSV/JSON etc.
+          String tableName,
+          String databaseName
 
-    uploadFile(path, is);
-  }
+  ) throws IOException {
+    LOG.info(" uploading file into databaseName {}, tableName {}", databaseName, tableName);
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, inputFileType);
 
-  private void uploadFile(final String filePath, InputStream uploadedInputStream)
-          throws IOException, InterruptedException {
-    byte[] chunk = new byte[1024];
-    FSDataOutputStream out = getSharedObjectsFactory().getHdfsApi().create(filePath, false);
-    while (uploadedInputStream.read(chunk) != -1) {
-      out.write(chunk);
-    }
-    out.close();
-  }
+    DataParser dataParser = new DataParser(new InputStreamReader(uploadedInputStream), parseOptions);
 
+    if (inputFileType.equals(ParseOptions.InputFileType.CSV.toString()) && isFirstRowHeader)
+      dataParser.extractHeader(); // removes the header line if any from the stream
 
-  protected synchronized JobResourceManager getResourceManager() {
-    if (resourceManager == null) {
-      SharedObjectsFactory connectionsFactory = getSharedObjectsFactory();
-      resourceManager = new JobResourceManager(connectionsFactory, context);
-    }
-    return resourceManager;
+    Reader csvReader = dataParser.getTableDataReader();
+    String path = uploadIntoTable(csvReader, databaseName, tableName);
+    return path;
   }
 
-  protected synchronized AmbariApi getAmbariApi() {
-    if (null == ambariApi) {
-      ambariApi = new AmbariApi(this.context);
-    }
-    return ambariApi;
+  private String getBasenameFromPath(String path) {
+    String fileName = new File(path).getName();
+    return getBasename(fileName);
   }
 
-  private String generateCreateQuery(String databaseName, String tableName, List<ColumnDescription> cdList) {
-    return new QueryGenerator().generateCreateQuery(new TableInfo(databaseName, tableName, cdList));
+  private String getBasename(String fileName) {
+    int index = fileName.indexOf(".");
+    if (index != -1) {
+      return fileName.substring(0, index);
+    }
+
+    return fileName;
+  }
 }
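
Side note: the chunked copy in uploadFile() above could equally be written with commons-io, which this class already imports from (a sketch, not part of the patch):

  private void uploadFile(final String filePath, InputStream uploadedInputStream)
          throws IOException, InterruptedException {
    FSDataOutputStream out = getSharedObjectsFactory().getHdfsApi().create(filePath, false);
    try {
      org.apache.commons.io.IOUtils.copy(uploadedInputStream, out); // buffered copy of the whole stream
    } finally {
      out.close(); // release the HDFS stream even if the copy fails
    }
  }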

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/DataParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/DataParser.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/DataParser.java
new file mode 100644
index 0000000..7eae679
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/DataParser.java
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.parsers;
+
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.ambari.view.hive.resources.uploads.parsers.csv.CSVParser;
+import org.apache.ambari.view.hive.resources.uploads.parsers.json.JSONParser;
+import org.apache.ambari.view.hive.resources.uploads.parsers.xml.XMLParser;
+
+import java.io.IOException;
+import java.io.Reader;
+import java.util.Iterator;
+
+/**
+ * Wrapper/Decorator over the Stream parsers.
+ * Supports XML/JSON/CSV parsing.
+ */
+public class DataParser implements IParser {
+
+  private IParser parser;
+
+  public DataParser(Reader reader, ParseOptions parseOptions) throws IOException {
+    if (parseOptions.getOption(ParseOptions.OPTIONS_FILE_TYPE).equals(ParseOptions.InputFileType.CSV.toString())) {
+      parser = new CSVParser(reader, parseOptions);
+    } else if (parseOptions.getOption(ParseOptions.OPTIONS_FILE_TYPE).equals(ParseOptions.InputFileType.JSON.toString())) {
+      parser = new JSONParser(reader, parseOptions);
+    } else if (parseOptions.getOption(ParseOptions.OPTIONS_FILE_TYPE).equals(ParseOptions.InputFileType.XML.toString())) {
+      parser = new XMLParser(reader, parseOptions);
+    } else {
+      // fail fast on an unknown type instead of leaving parser null and failing later with an NPE
+      throw new IllegalArgumentException("Unsupported file type : " + parseOptions.getOption(ParseOptions.OPTIONS_FILE_TYPE));
+    }
+  }
+
+  @Override
+  public Reader getTableDataReader() {
+    return parser.getTableDataReader();
+  }
+
+  @Override
+  public PreviewData parsePreview() {
+    return parser.parsePreview();
+  }
+
+  @Override
+  public Row extractHeader() {
+    return parser.extractHeader();
+  }
+
+  @Override
+  public void close() throws IOException {
+    parser.close();
+  }
+
+  @Override
+  public Iterator<Row> iterator() {
+    return parser.iterator();
+  }
+}
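
A usage sketch for the decorator (assumes `in` is an InputStream over an uploaded JSON file):

  ParseOptions opts = new ParseOptions();
  opts.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
  DataParser parser = new DataParser(new InputStreamReader(in), opts);
  PreviewData preview = parser.parsePreview(); // header + sample rows + type suggestions
  parser.close();

Note that preview and upload are separate passes in the service: the upload path opens a fresh stream and calls getTableDataReader() instead.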

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/EndOfDocumentException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/EndOfDocumentException.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/EndOfDocumentException.java
new file mode 100644
index 0000000..6bbe303
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/EndOfDocumentException.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.parsers;
+
+
+public class EndOfDocumentException extends Exception {
+  public EndOfDocumentException() {
+  }
+
+  public EndOfDocumentException(String message) {
+    super(message);
+  }
+
+  public EndOfDocumentException(String message, Throwable cause) {
+    super(message, cause);
+  }
+
+  public EndOfDocumentException(Throwable cause) {
+    super(cause);
+  }
+
+  public EndOfDocumentException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
+    super(message, cause, enableSuppression, writableStackTrace);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/IParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/IParser.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/IParser.java
new file mode 100644
index 0000000..6056e73
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/IParser.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.parsers;
+
+import org.apache.ambari.view.hive.client.Row;
+
+import java.io.IOException;
+import java.io.Reader;
+
+/**
+ * Interface defining methods for Parsers that can be used for generating a preview
+ * and uploading a table into hive.
+ */
+public interface IParser extends Iterable<Row> {
+
+  /**
+   * @return a Reader over the table data re-encoded as CSV text, suitable for uploading
+   * directly to HDFS
+   */
+  Reader getTableDataReader();
+
+  PreviewData parsePreview();
+
+  Row extractHeader();
+
+  void close() throws IOException;
+
+}
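
A hedged sketch of the upload-side contract (it mirrors how UploadService drives a parser; `in` and `opts` are assumed):

  IParser parser = new DataParser(new InputStreamReader(in), opts);
  parser.extractHeader();                    // drop the header row when the first record is a header
  Reader csv = parser.getTableDataReader();  // lazily re-encodes the remaining rows as CSV text
  // ... stream `csv` into the temporary table's file on HDFS, then:
  parser.close();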

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/ParseOptions.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/ParseOptions.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/ParseOptions.java
new file mode 100644
index 0000000..adbdf7f
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/ParseOptions.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.parsers;
+
+import java.util.HashMap;
+
+public class ParseOptions {
+  public enum InputFileType {
+    CSV,
+    JSON,
+    XML
+  }
+
+  public enum HEADER {
+    FIRST_RECORD,
+    PROVIDED_BY_USER
+  }
+  final public static String OPTIONS_FILE_TYPE = "FILE_TYPE";
+  final public static String OPTIONS_HEADER = "HEADER";
+  final public static String OPTIONS_NUMBER_OF_PREVIEW_ROWS = "NUMBER_OF_PREVIEW_ROWS";
+
+  private HashMap<String, Object> options = new HashMap<>();
+
+  public void setOption(String key, Object value) {
+    this.options.put(key, value);
+  }
+
+  public Object getOption(String key) {
+    return this.options.get(key);
+  }
+}
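
The options are untyped key/value pairs; a sketch of the ones this patch reads (values per the enums above):

  ParseOptions opts = new ParseOptions();
  opts.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
  opts.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
  opts.setOption(ParseOptions.OPTIONS_NUMBER_OF_PREVIEW_ROWS, 25); // overrides Parser's default of 10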

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/ParseUtils.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/ParseUtils.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/ParseUtils.java
new file mode 100644
index 0000000..a17346f
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/ParseUtils.java
@@ -0,0 +1,134 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.parsers;
+
+import org.apache.ambari.view.hive.client.ColumnDescription;
+
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+public class ParseUtils {
+
+  final public static String[] DATE_FORMATS = {"MM/dd/yyyy", "dd/MM/yyyy", "MM-dd-yyyy" /* add more formats */}; // MM = month; lowercase mm means minutes in SimpleDateFormat
+
+  public static boolean isInteger(Object object) {
+    if (object == null)
+      return false;
+
+    if (object instanceof Integer)
+      return true;
+
+    try {
+      Integer i = Integer.parseInt(object.toString());
+      return true;
+    } catch (NumberFormatException nfe) {
+      return false;
+    }
+  }
+
+  public static boolean isBoolean(Object object) {
+    if (object == null)
+      return false;
+
+    if (object instanceof Boolean)
+      return true;
+
+    String strValue = object.toString();
+    return strValue.equalsIgnoreCase("true") || strValue.equalsIgnoreCase("false");
+  }
+
+  public static boolean isLong(Object object) {
+    if (object == null)
+      return false;
+
+    if (object instanceof Long)
+      return true;
+
+    try {
+      Long i = Long.parseLong(object.toString());
+      return true;
+    } catch (Exception nfe) {
+      return false;
+    }
+  }
+
+  public static boolean isDouble(Object object) {
+    if (object == null)
+      return false;
+
+    if (object instanceof Double)
+      return true;
+
+    try {
+      Double i = Double.parseDouble(object.toString());
+      return true;
+    } catch (Exception nfe) {
+      return false;
+    }
+  }
+
+  public static boolean isChar(Object object) {
+    if (object == null)
+      return false;
+
+    if (object instanceof Character)
+      return true;
+
+    String str = object.toString().trim();
+    if (str.length() == 1)
+      return true;
+
+    return false;
+  }
+
+  public static boolean isDate(Object object) {
+    if (object == null)
+      return false;
+
+    if (object instanceof Date)
+      return true;
+
+    String str = object.toString();
+    for (String format : DATE_FORMATS) {
+      try {
+        SimpleDateFormat sdf = new SimpleDateFormat(format);
+        sdf.setLenient(false); // without this, lenient parsing accepts almost any numeric string as a date
+        sdf.parse(str);
+        return true;
+      } catch (Exception e) {
+        // not in this format; try the next one
+      }
+    }
+
+    return false;
+  }
+
+  public static ColumnDescription.DataTypes detectHiveDataType(Object object) {
+    // checks run from the most specific type to the least; STRING is the fallback
+    if (isInteger(object)) return ColumnDescription.DataTypes.INT;
+    if (isLong(object)) return ColumnDescription.DataTypes.BIGINT;
+    if (isBoolean(object)) return ColumnDescription.DataTypes.BOOLEAN;
+    if (isDouble(object)) return ColumnDescription.DataTypes.DOUBLE;
+    if (isDate(object)) return ColumnDescription.DataTypes.DATE;
+    if (isChar(object)) return ColumnDescription.DataTypes.CHAR;
+
+    return ColumnDescription.DataTypes.STRING;
+  }
+}
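
Given the order of checks in detectHiveDataType, a few illustrative inputs and the types this implementation suggests:

  ParseUtils.detectHiveDataType("42");          // INT
  ParseUtils.detectHiveDataType("9999999999");  // BIGINT (too large for int)
  ParseUtils.detectHiveDataType("true");        // BOOLEAN
  ParseUtils.detectHiveDataType("42.5");        // DOUBLE
  ParseUtils.detectHiveDataType("01/20/2016");  // DATE (matches MM/dd/yyyy)
  ParseUtils.detectHiveDataType("x");           // CHAR (single character)
  ParseUtils.detectHiveDataType("hello");       // STRING (the fallback)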

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/Parser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/Parser.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/Parser.java
new file mode 100644
index 0000000..d94ad3c
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/Parser.java
@@ -0,0 +1,154 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.parsers;
+
+import org.apache.ambari.view.hive.client.ColumnDescription;
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.ambari.view.hive.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive.resources.uploads.TableDataReader;
+
+import java.io.Reader;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.NoSuchElementException;
+
+/**
+ * Provides the general implementation for parsing JSON/CSV/XML files
+ * to generate preview rows, headers and column types.
+ * Also provides a TableDataReader for converting any supported type to CSV.
+ */
+public abstract class Parser implements IParser {
+
+  protected Reader reader; // the underlying reader over the uploaded stream
+  protected ParseOptions parseOptions;
+  private int numberOfPreviewRows = 10;
+
+  public Parser(Reader originalReader, ParseOptions parseOptions) {
+    this.reader = originalReader;
+    this.parseOptions = parseOptions;
+  }
+
+  /**
+   * returns the datatype that was detected the greatest number of times in the given column
+   *
+   * @param typeCounts per-column counts of detections, indexed by DataTypes ordinal
+   * @param colNum     the column to inspect
+   * @return the ordinal of the most frequently detected datatype
+   */
+  private int getLikelyDataType(int[][] typeCounts, int colNum) {
+    int[] colArray = typeCounts[colNum];
+    int maxIndex = 0;
+    int i = 1;
+    for (; i < colArray.length; i++) {
+      if (colArray[i] > colArray[maxIndex])
+        maxIndex = i;
+    }
+
+    return maxIndex;
+  }
+
+  @Override
+  public Reader getTableDataReader() {
+    return new TableDataReader(this.iterator());
+  }
+
+  @Override
+  public PreviewData parsePreview() {
+    List<Row> previewRows;
+    List<ColumnDescription> header;
+
+    try {
+      numberOfPreviewRows = (Integer) parseOptions.getOption(ParseOptions.OPTIONS_NUMBER_OF_PREVIEW_ROWS);
+    } catch (Exception e) {
+      // option absent or not an Integer; keep the default of 10 preview rows
+    }
+
+    int numberOfRows = numberOfPreviewRows;
+    previewRows = new ArrayList<Row>(numberOfPreviewRows + 1); // size including the header.
+
+    Row headerRow = null;
+    Integer numOfCols = null;
+    int[][] typeCounts = null;
+
+    if (parseOptions.getOption(ParseOptions.OPTIONS_HEADER) != null && parseOptions.getOption(ParseOptions.OPTIONS_HEADER).equals(ParseOptions.HEADER.FIRST_RECORD.toString())) {
+      if (!this.iterator().hasNext()) {
+        throw new NoSuchElementException("Cannot parse Header");
+      }
+      headerRow = extractHeader();
+      numOfCols = headerRow.getRow().length;
+      typeCounts = new int[numOfCols][ColumnDescription.DataTypes.values().length];
+      previewRows.add(headerRow);
+    }
+
+    // find data types.
+
+    Row r;
+    if (iterator().hasNext()) {
+      r = iterator().next();
+      if( null == numOfCols ) {
+        numOfCols = r.getRow().length;
+        typeCounts = new int[numOfCols][ColumnDescription.DataTypes.values().length];
+      }
+    } else {
+        throw new NoSuchElementException("No rows in the file.");
+    }
+
+    while (true) {
+      // create Header definition from row
+      Object[] values = r.getRow();
+
+      Object[] newValues = new Object[numOfCols]; // pads with nulls if fewer columns were detected and drops any extra columns
+
+      for (int colNum = 0; colNum < numOfCols; colNum++) {
+        if(colNum < values.length) {
+          // detect type
+          ColumnDescription.DataTypes type = ParseUtils.detectHiveDataType(values[colNum]);
+          typeCounts[colNum][type.ordinal()]++;
+          newValues[colNum] = values[colNum];
+        } else {
+          newValues[colNum] = null;
+        }
+      }
+
+      previewRows.add(new Row(newValues));
+
+      numberOfRows--;
+      if (numberOfRows <= 0 || !iterator().hasNext())
+        break;
+
+      r = iterator().next();
+    }
+
+    if (previewRows.size() <= 0)
+      throw new NoSuchElementException("Does not contain any rows.");
+
+    header = new ArrayList<>(numOfCols);
+    for (int colNum = 0; colNum < numOfCols; colNum++) {
+      int dataTypeId = getLikelyDataType(typeCounts, colNum);
+      ColumnDescription.DataTypes type = ColumnDescription.DataTypes.values()[dataTypeId];
+      String colName = "Column" + colNum;
+      if (null != headerRow)
+        colName = (String) headerRow.getRow()[colNum];
+
+      ColumnDescription cd = new ColumnDescriptionImpl(colName, type.toString(), colNum);
+      header.add(cd);
+    }
+
+    return new PreviewData(header, previewRows);
+  }
+}
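
The per-column suggestion is a simple vote: for every sampled value, detectHiveDataType() increments typeCounts[column][type.ordinal()], and getLikelyDataType() returns the argmax. A small illustration with assumed counts:

  int[] colArray = new int[ColumnDescription.DataTypes.values().length];
  colArray[ColumnDescription.DataTypes.INT.ordinal()] = 10;    // ten values parsed as INT
  colArray[ColumnDescription.DataTypes.STRING.ordinal()] = 2;  // two were arbitrary text
  // the argmax over colArray picks INT, so the column is suggested as INT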


[2/3] ambari git commit: AMBARI-15046. Hive view Upload table feature now supports upload from HDFS and Local.Supports JSON/XML/CSV as uploaded files and ORC and all other hive internal storage types. (Nitiraj Rathore via pallavkul)

Posted by pa...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/PreviewData.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/PreviewData.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/PreviewData.java
new file mode 100644
index 0000000..8fcae95
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/PreviewData.java
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.parsers;
+
+import org.apache.ambari.view.hive.client.ColumnDescription;
+import org.apache.ambari.view.hive.client.Row;
+
+import java.util.List;
+
+/**
+ * Encapsulates the preview data produced by a parser: a header plus sample rows.
+ */
+public class PreviewData {
+  private List<ColumnDescription> header;
+  private List<Row> previewRows;
+
+  public PreviewData() {
+  }
+
+  public PreviewData(List<ColumnDescription> header, List<Row> previewRows) {
+    this.header = header;
+    this.previewRows = previewRows;
+  }
+
+  public List<ColumnDescription> getHeader() {
+    return header;
+  }
+
+  public void setHeader(List<ColumnDescription> header) {
+    this.header = header;
+  }
+
+  public List<Row> getPreviewRows() {
+    return previewRows;
+  }
+
+  public void setPreviewRows(List<Row> previewRows) {
+    this.previewRows = previewRows;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/RowIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/RowIterator.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/RowIterator.java
new file mode 100644
index 0000000..69fe864
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/RowIterator.java
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.parsers;
+
+import org.apache.ambari.view.hive.client.Row;
+
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.LinkedList;
+
+/**
+ * Converts the map of values produced by the JSON/XML parsers into ordered values in a Row.
+ * Takes a RowMapIterator as input.
+ */
+public class RowIterator implements Iterator<Row> {
+
+  private LinkedList<String> headers = null;
+  private RowMapIterator iterator;
+
+  /**
+   * creates a row iterator over the map values of the given RowMapIterator,
+   * keeping the keys of the first map as the header.
+   * @param iterator the underlying map iterator
+   */
+  public RowIterator(RowMapIterator iterator) {
+    this.iterator = iterator;
+    LinkedHashMap<String, String> obj = iterator.peek();
+    if (null != obj)
+      headers = new LinkedList<>(obj.keySet());
+  }
+
+  @Override
+  public boolean hasNext() {
+    return iterator.hasNext();
+  }
+
+
+  @Override
+  public Row next() {
+    LinkedHashMap<String, String> r = this.iterator.next();
+    if (null == r) {
+      return null;
+    }
+
+    return convertToRow(r);
+  }
+
+  @Override
+  public void remove() {
+    iterator.remove();
+  }
+
+  /**
+   * @return ordered collection of header strings
+   */
+  public LinkedList<String> extractHeaders() {
+    return headers;
+  }
+
+  /**
+   * converts the map into a Row
+   * @param lr ordered map of column name to value
+   * @return the values arranged in header order
+   */
+  private Row convertToRow(LinkedHashMap<String, String> lr) {
+    Object[] data = new Object[headers.size()];
+    int i = 0;
+    for (String cd : headers) {
+      String d = lr.get(cd);
+
+      if (d != null)
+        d = d.trim(); // trim to remove any \n etc., since newline is the row separator used by TableDataReader
+
+      data[i++] = d;
+    }
+
+    return new Row(data);
+  }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/RowMapIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/RowMapIterator.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/RowMapIterator.java
new file mode 100644
index 0000000..a0ff2d7
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/RowMapIterator.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.parsers;
+
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+
+/**
+ * iterator which generates an ordered map of column names to values for each row of streams like JSON and XML
+ */
+public interface RowMapIterator extends Iterator<LinkedHashMap<String, String>> {
+  LinkedHashMap<String, String> peek() ;
+}
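
A minimal sketch of an implementation over a prebuilt list (the real JSON/XML iterators wrap streaming parsers; `maps` is an assumed List<LinkedHashMap<String, String>>):

  final Iterator<LinkedHashMap<String, String>> it = maps.iterator();
  RowMapIterator mapIterator = new RowMapIterator() {
    private LinkedHashMap<String, String> lookAhead = it.hasNext() ? it.next() : null;
    public LinkedHashMap<String, String> peek() { return lookAhead; }
    public boolean hasNext() { return null != lookAhead; }
    public LinkedHashMap<String, String> next() {
      LinkedHashMap<String, String> current = lookAhead;
      lookAhead = it.hasNext() ? it.next() : null; // advance the one-element look-ahead
      return current;
    }
    public void remove() { throw new UnsupportedOperationException(); }
  };
  RowIterator rows = new RowIterator(mapIterator); // headers come from the first map's keys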

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/CSVIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/CSVIterator.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/CSVIterator.java
new file mode 100644
index 0000000..3342f49
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/CSVIterator.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.parsers.csv;
+
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.commons.csv.CSVRecord;
+
+import java.util.Iterator;
+
+/**
+ * Iterates over the input CSV records and generates Row objects.
+ */
+class CSVIterator implements Iterator<Row> {
+
+  private Iterator<CSVRecord> iterator;
+
+  public CSVIterator(Iterator<CSVRecord> iterator) {
+    this.iterator = iterator;
+  }
+
+  @Override
+  public boolean hasNext() {
+    return iterator.hasNext();
+  }
+
+  @Override
+  public Row next() {
+    CSVRecord row = iterator.next();
+    Object[] values = new Object[row.size()];
+    for (int i = 0; i < values.length; i++) {
+      values[i] = row.get(i);
+    }
+    return new Row(values);
+  }
+
+  @Override
+  public void remove() {
+    this.iterator.remove();
+  }
+}
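A usage sketch (from within the same package, since CSVIterator is package-private); it assumes commons-csv on the classpath and mirrors how CSVParser below constructs the underlying parser:

  import org.apache.ambari.view.hive.client.Row;
  import org.apache.commons.csv.CSVFormat;
  import java.io.IOException;
  import java.io.StringReader;

  static void demo() throws IOException {
    // fully qualified to avoid the name clash with the view's own CSVParser
    org.apache.commons.csv.CSVParser csv =
        new org.apache.commons.csv.CSVParser(new StringReader("a,b\n1,2\n"), CSVFormat.EXCEL);
    CSVIterator it = new CSVIterator(csv.iterator());
    while (it.hasNext()) {
      Row row = it.next(); // each CSVRecord becomes a Row of its string values
    }
    csv.close();
  }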

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/CSVParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/CSVParser.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/CSVParser.java
new file mode 100644
index 0000000..56cff58
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/CSVParser.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.hive.resources.uploads.parsers.csv;
+
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive.resources.uploads.parsers.Parser;
+import org.apache.commons.csv.CSVFormat;
+
+import java.io.*;
+import java.util.*;
+
+/**
+ * Parses the given Reader, which contains a CSV stream; extracts headers and rows, and detects the data types of columns.
+ */
+public class CSVParser extends Parser {
+
+  private CSVIterator iterator;
+  private org.apache.commons.csv.CSVParser parser;
+
+  public CSVParser(Reader reader, ParseOptions parseOptions) throws IOException {
+    super(reader, parseOptions);
+    parser = new org.apache.commons.csv.CSVParser(this.reader, CSVFormat.EXCEL);
+    iterator = new CSVIterator(parser.iterator());
+  }
+
+  @Override
+  public Row extractHeader() {
+    return this.iterator().next();
+  }
+
+  @Override
+  public void close() throws IOException {
+    this.parser.close();
+  }
+
+  public Iterator<Row> iterator() {
+    return iterator; // only one iterator per parser; extractHeader() and row iteration share it.
+  }
+}
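A usage sketch, assuming ParseOptions offers a no-argument constructor (nothing from it is read on the CSV path here). Since extractHeader() pulls the next record off the single shared iterator, the row loop afterwards starts at the second record:

  import org.apache.ambari.view.hive.client.Row;
  import java.io.IOException;
  import java.io.StringReader;
  import java.util.Iterator;

  static void demo() throws IOException {
    CSVParser parser = new CSVParser(new StringReader("name,age\nalice,30\nbob,25\n"), new ParseOptions());
    Row header = parser.extractHeader(); // consumes the first record: [name, age]
    Iterator<Row> rows = parser.iterator();
    while (rows.hasNext()) {
      Row row = rows.next(); // [alice, 30], then [bob, 25]
    }
    parser.close();
  }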

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/json/JSONIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/json/JSONIterator.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/json/JSONIterator.java
new file mode 100644
index 0000000..534d9e7
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/json/JSONIterator.java
@@ -0,0 +1,160 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.parsers.json;
+
+import com.google.gson.stream.JsonReader;
+import com.google.gson.stream.JsonToken;
+import org.apache.ambari.view.hive.resources.uploads.parsers.EndOfDocumentException;
+import org.apache.ambari.view.hive.resources.uploads.parsers.RowMapIterator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.LinkedHashMap;
+
+/**
+ * Iterates over the JsonReader and creates row data.
+ * Assumes an array of JSON objects, e.g.:
+ * [ { "col1Name" : "value-1-1", "col2Name" : "value-1-2"}, { "col1Name" : "value-2-1", "col2Name" : "value-2-2"}]
+ */
+class JSONIterator implements RowMapIterator {
+
+  protected final static Logger LOG =
+          LoggerFactory.getLogger(JSONIterator.class);
+
+  private LinkedHashMap<String, String> nextObject = null;
+
+  private LinkedHashMap<String, String> readNextObject(JsonReader reader) throws IOException, EndOfDocumentException {
+    LinkedHashMap<String, String> row = new LinkedHashMap<>();
+    boolean objectStarted = false;
+    boolean shouldBeName = false;
+    String currentName = null;
+
+    while (true) {
+      JsonToken token = reader.peek();
+      switch (token) {
+        case BEGIN_ARRAY:
+          throw new IllegalArgumentException("Row data cannot have an array.");
+        case END_ARRAY:
+          throw new EndOfDocumentException("End of Json Array document.");
+        case BEGIN_OBJECT:
+          if (objectStarted) {
+            throw new IllegalArgumentException("Nested objects not supported.");
+          }
+          if (shouldBeName) {
+            throw new IllegalArgumentException("name expected, got begin_object");
+          }
+          objectStarted = true;
+          shouldBeName = true;
+          reader.beginObject();
+          break;
+        case END_OBJECT:
+          if (!shouldBeName) {
+            throw new IllegalArgumentException("value expected, got end_object");
+          }
+          reader.endObject();
+          return row;
+        case NAME:
+          if (!shouldBeName) {
+            throw new IllegalArgumentException("name not expected at this point.");
+          }
+          shouldBeName = false;
+          currentName = reader.nextName();
+          break;
+        case NUMBER:
+        case STRING:
+          if (shouldBeName) {
+            throw new IllegalArgumentException("value not expected at this point.");
+          }
+          String n = reader.nextString();
+          row.put(currentName, n);
+          shouldBeName = true;
+          break;
+        case BOOLEAN:
+          if (shouldBeName) {
+            throw new IllegalArgumentException("value not expected at this point.");
+          }
+          String b = String.valueOf(reader.nextBoolean());
+          row.put(currentName, b);
+          shouldBeName = true;
+          break;
+        case NULL:
+          if (shouldBeName) {
+            throw new IllegalArgumentException("value not expected at this point.");
+          }
+          reader.nextNull();
+          row.put(currentName, "");
+          shouldBeName = true;
+          break;
+        case END_DOCUMENT:
+          return row;
+
+        default:
+          throw new IllegalArgumentException("Illegal token detected inside json: token : " + token.toString());
+      }
+    }
+  }
+
+  private JsonReader reader;
+
+  public JSONIterator(JsonReader reader) throws IOException {
+    this.reader = reader;
+    // test the start of array
+    JsonToken jt = reader.peek();
+    if (jt != JsonToken.BEGIN_ARRAY) {
+      throw new IllegalArgumentException("Expected the whole document to contain a single JsonArray.");
+    }
+
+    reader.beginArray(); // read the start of array
+    try {
+      nextObject = readNextObject(this.reader);
+    } catch (EndOfDocumentException e) {
+      // an empty JSON array yields no rows; nextObject stays null and hasNext() is false.
+    }
+  }
+
+  @Override
+  public boolean hasNext() {
+    return null != nextObject;
+  }
+
+  public LinkedHashMap<String, String> peek() {
+    return nextObject;
+  }
+
+  @Override
+  public LinkedHashMap<String, String> next() {
+    LinkedHashMap<String, String> currObject = nextObject;
+    try {
+      nextObject = readNextObject(this.reader);
+    } catch (EndOfDocumentException e) {
+      LOG.debug("End of Json document reached with next character ending the JSON Array.");
+      nextObject = null;
+    } catch (Exception e){
+      // for any other exception throw error right away
+      throw new IllegalArgumentException(e);
+    }
+    return currObject;
+  }
+
+  @Override
+  public void remove() {
+    // no operation.
+    LOG.info("No operation when remove called on JSONIterator.");
+  }
+}
\ No newline at end of file
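A usage sketch (from within the same package, since JSONIterator is package-private); peek() exposes the first object's keys without consuming it, which is how column names can be derived before any row is read:

  import com.google.gson.stream.JsonReader;
  import java.io.IOException;
  import java.io.StringReader;
  import java.util.LinkedHashMap;

  static void demo() throws IOException {
    JsonReader jr = new JsonReader(new StringReader(
        "[{\"col1\":\"a\",\"col2\":\"b\"},{\"col1\":\"c\",\"col2\":\"d\"}]"));
    JSONIterator it = new JSONIterator(jr);
    System.out.println(it.peek().keySet()); // [col1, col2] -- the first row is still pending
    while (it.hasNext()) {
      LinkedHashMap<String, String> row = it.next();
      System.out.println(row.values()); // [a, b], then [c, d]
    }
    jr.close();
  }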

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/json/JSONParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/json/JSONParser.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/json/JSONParser.java
new file mode 100644
index 0000000..1d61798
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/json/JSONParser.java
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.parsers.json;
+
+import com.google.gson.stream.JsonReader;
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive.resources.uploads.parsers.Parser;
+import org.apache.ambari.view.hive.resources.uploads.parsers.RowIterator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.Reader;
+import java.util.Collection;
+import java.util.Iterator;
+
+
+/**
+ * Parses the input data from reader as JSON and provides iterator for rows.
+ *
+ * Expects the input reader to contain a JsonArray in which each element is a JsonObject
+ * corresponding to a row.
+ * eg. :
+ *
+ * [
+ *  {row1-col1, row1-col2, row1-col3},
+ *  {row2-col1, row2-col2, row2-col3}
+ * ]
+ *
+ */
+public class JSONParser extends Parser {
+
+  protected final static Logger LOG =
+          LoggerFactory.getLogger(JSONParser.class);
+
+  private RowIterator iterator;
+  private JsonReader jsonReader;
+  private JSONIterator jsonIterator;
+
+  public JSONParser(Reader reader, ParseOptions parseOptions) throws IOException {
+    super(reader, parseOptions);
+    this.jsonReader = new JsonReader(this.reader);
+    jsonIterator = new JSONIterator(this.jsonReader);
+    iterator = new RowIterator(jsonIterator);
+  }
+
+  @Override
+  public Row extractHeader() {
+    Collection<String> headers = this.iterator.extractHeaders();
+    Object[] objs = new Object[headers.size()];
+    Iterator<String> iterator = headers.iterator();
+    for(int i = 0 ; i < headers.size() ; i++){
+      objs[i] = iterator.next();
+    }
+
+    return new Row(objs);
+  }
+
+  @Override
+  public void close() throws IOException {
+    this.jsonReader.close();
+  }
+
+  @Override
+  public Iterator<Row> iterator() {
+    return iterator;
+  }
+}
\ No newline at end of file
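A usage sketch, again assuming a no-argument ParseOptions constructor. Column names come from the first object's keys via the underlying peek(), so header extraction presumably leaves the first data row intact:

  import org.apache.ambari.view.hive.client.Row;
  import java.io.IOException;
  import java.io.StringReader;
  import java.util.Iterator;

  static void demo() throws IOException {
    JSONParser parser = new JSONParser(new StringReader(
        "[{\"name\":\"alice\",\"age\":\"30\"},{\"name\":\"bob\",\"age\":\"25\"}]"), new ParseOptions());
    Row header = parser.extractHeader(); // [name, age]
    Iterator<Row> rows = parser.iterator();
    while (rows.hasNext()) {
      Row row = rows.next(); // one Row per JSON object
    }
    parser.close();
  }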

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/xml/XMLIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/xml/XMLIterator.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/xml/XMLIterator.java
new file mode 100644
index 0000000..5852dfc
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/xml/XMLIterator.java
@@ -0,0 +1,195 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.parsers.xml;
+
+import org.apache.ambari.view.hive.resources.uploads.parsers.EndOfDocumentException;
+import org.apache.ambari.view.hive.resources.uploads.parsers.RowMapIterator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.xml.namespace.QName;
+import javax.xml.stream.XMLEventReader;
+import javax.xml.stream.XMLStreamConstants;
+import javax.xml.stream.XMLStreamException;
+import javax.xml.stream.events.*;
+import java.io.IOException;
+import java.util.LinkedHashMap;
+
+/**
+ * assumes XML of following format
+ * <table>
+ * <row>
+ * <col name="col1Name">row1-col1-Data</col>
+ * <col name="col2Name">row1-col2-Data</col>
+ * <col name="col3Name">row1-col3-Data</col>
+ * <col name="col4Name">row1-col4-Data</col>
+ * </row>
+ * <row>
+ * <col name="col1Name">row2-col1-Data</col>
+ * <col name="col2Name">row2-col2-Data</col>
+ * <col name="col3Name">row2-col3-Data</col>
+ * <col name="col4Name">row2-col4-Data</col>
+ * </row>
+ * </table>
+ */
+class XMLIterator implements RowMapIterator {
+
+  protected final static Logger LOG =
+          LoggerFactory.getLogger(XMLIterator.class);
+
+  private LinkedHashMap<String, String> nextObject = null;
+  private static final String TAG_TABLE = "table";
+  private static final String TAG_ROW = "row";
+  private static final String TAG_COL = "col";
+  private boolean documentStarted = false;
+  private XMLEventReader reader;
+
+  public XMLIterator(XMLEventReader reader) throws IOException {
+    this.reader = reader;
+    try {
+      nextObject = readNextObject(this.reader);
+    } catch (EndOfDocumentException e) {
+      LOG.debug("error : {}", e);
+    } catch (XMLStreamException e) {
+      throw new IOException(e);
+    }
+  }
+
+  @Override
+  public boolean hasNext() {
+    return null != nextObject;
+  }
+
+  public LinkedHashMap<String, String> peek() {
+    return nextObject;
+  }
+
+  @Override
+  public LinkedHashMap<String, String> next() {
+    LinkedHashMap<String, String> currObject = nextObject;
+    try {
+      nextObject = readNextObject(this.reader);
+    } catch (IOException e) {
+      LOG.error("Exception occured while reading the next row from XML : {} ", e);
+      nextObject = null;
+    } catch (EndOfDocumentException e) {
+      LOG.debug("End of XML document reached with next character ending the XML.");
+      nextObject = null;
+    } catch (XMLStreamException e) {
+      LOG.error("Exception occured while reading the next row from XML : {} ", e);
+      nextObject = null;
+    }
+    return currObject;
+  }
+
+  @Override
+  public void remove() {
+    // no operation.
+    LOG.info("No operation when remove called.");
+  }
+
+  private LinkedHashMap<String, String> readNextObject(XMLEventReader reader) throws IOException, EndOfDocumentException, XMLStreamException {
+    LinkedHashMap<String, String> row = new LinkedHashMap<>();
+    boolean objectStarted = false;
+    String currentName = null;
+
+    while (true) {
+      XMLEvent event = reader.nextEvent();
+      switch (event.getEventType()) {
+        case XMLStreamConstants.START_ELEMENT:
+          StartElement startElement = event.asStartElement();
+          String qName = startElement.getName().getLocalPart();
+          LOG.debug("startName : {}" , qName);
+          switch (qName) {
+            case TAG_TABLE:
+              if (documentStarted) {
+                throw new IllegalArgumentException("Cannot have a <table> tag nested inside another <table> tag");
+              } else {
+                documentStarted = true;
+              }
+              break;
+            case TAG_ROW:
+              if (objectStarted) {
+                throw new IllegalArgumentException("Cannot have a <row> tag nested inside another <row> tag");
+              } else {
+                objectStarted = true;
+              }
+              break;
+            case TAG_COL:
+              if (!objectStarted) {
+                throw new IllegalArgumentException("Stray tag " + qName);
+              }
+              Attribute nameAttr = startElement.getAttributeByName( new QName("name"));
+              if( null == nameAttr ){
+                throw new IllegalArgumentException("Missing name attribute in col tag.");
+              }
+              currentName = nameAttr.getValue();
+              break;
+            default:
+              throw new IllegalArgumentException("Illegal start tag " + qName + " encountered.");
+          }
+          break;
+        case XMLStreamConstants.END_ELEMENT:
+          EndElement endElement = event.asEndElement();
+          String name = endElement.getName().getLocalPart();
+          LOG.debug("endName : {}", name);
+          switch (name) {
+            case TAG_TABLE:
+              if (!documentStarted) {
+                throw new IllegalArgumentException("Stray </table> tag.");
+              }
+              throw new EndOfDocumentException("End of XML document.");
+
+            case TAG_ROW:
+              if (!objectStarted) {
+                throw new IllegalArgumentException("Stray </row> tag.");
+              }
+              return row;
+
+            case TAG_COL:
+              if (!objectStarted) {
+                throw new IllegalArgumentException("Stray tag " + name);
+              }
+              currentName = null;
+              break;
+
+            default:
+              throw new IllegalArgumentException("Illegal start ending " + name + " encountered.");
+          }
+          break;
+        case XMLStreamConstants.CHARACTERS:
+          Characters characters = event.asCharacters();
+          if (characters.isWhiteSpace() && currentName == null)
+            break;
+          String data = characters.getData();
+          LOG.debug("character data : {}", data);
+          if (currentName == null) {
+            throw new IllegalArgumentException("Illegal characters outside any tag : " + data);
+          } else {
+            String oldData = row.get(currentName);
+            if (null != oldData) {
+              data = oldData + data;
+            }
+            row.put(currentName, data);
+          }
+          break;
+      }
+    }
+  }
+}
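A usage sketch (same package, since XMLIterator is package-private), wiring up a StAX event reader exactly as XMLParser below does:

  import javax.xml.stream.XMLEventReader;
  import javax.xml.stream.XMLInputFactory;
  import javax.xml.stream.XMLStreamException;
  import java.io.IOException;
  import java.io.StringReader;

  static void demo() throws IOException, XMLStreamException {
    String xml = "<table><row><col name=\"c1\">v1</col><col name=\"c2\">v2</col></row></table>";
    XMLEventReader events = XMLInputFactory.newInstance().createXMLEventReader(new StringReader(xml));
    XMLIterator it = new XMLIterator(events);
    while (it.hasNext()) {
      System.out.println(it.next()); // {c1=v1, c2=v2}
    }
    events.close();
  }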

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/xml/XMLParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/xml/XMLParser.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/xml/XMLParser.java
new file mode 100644
index 0000000..74bf272
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/xml/XMLParser.java
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.parsers.xml;
+
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive.resources.uploads.parsers.Parser;
+import org.apache.ambari.view.hive.resources.uploads.parsers.RowIterator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.xml.stream.XMLEventReader;
+import javax.xml.stream.XMLInputFactory;
+import javax.xml.stream.XMLStreamException;
+import java.io.IOException;
+import java.io.Reader;
+import java.util.Collection;
+import java.util.Iterator;
+
+/**
+ * assumes XML of following format
+ * <table>
+ * <row>
+ * <col name="col1Name">row1-col1-Data</col>
+ * <col name="col2Name">row1-col2-Data</col>
+ * <col name="col3Name">row1-col3-Data</col>
+ * <col name="col4Name">row1-col4-Data</col>
+ * </row>
+ * <row>
+ * <col name="col1Name">row2-col1-Data</col>
+ * <col name="col2Name">row2-col2-Data</col>
+ * <col name="col3Name">row2-col3-Data</col>
+ * <col name="col4Name">row2-col4-Data</col>
+ * </row>
+ * </table>
+ */
+public class XMLParser extends Parser {
+
+  protected final static Logger LOG =
+          LoggerFactory.getLogger(XMLParser.class);
+
+  private RowIterator iterator;
+  private XMLEventReader xmlReader;
+  private XMLIterator xmlIterator;
+
+  public XMLParser(Reader reader, ParseOptions parseOptions) throws IOException {
+    super(reader, parseOptions);
+    XMLInputFactory factory = XMLInputFactory.newInstance();
+    try {
+      this.xmlReader = factory.createXMLEventReader(reader);
+    } catch (XMLStreamException e) {
+      LOG.error("error occurred while creating xml reader : ", e);
+      throw new IOException("error occurred while creating xml reader : ", e);
+    }
+    xmlIterator = new XMLIterator(this.xmlReader);
+    iterator = new RowIterator(xmlIterator);
+  }
+
+  @Override
+  public Row extractHeader() {
+    Collection<String> headers = this.iterator.extractHeaders();
+    Object[] objs = new Object[headers.size()];
+    Iterator<String> iterator = headers.iterator();
+    for (int i = 0; i < headers.size(); i++) {
+      objs[i] = iterator.next();
+    }
+
+    return new Row(objs);
+  }
+
+  @Override
+  public void close() throws IOException {
+    try {
+      this.xmlReader.close();
+    } catch (XMLStreamException e) {
+      throw new IOException(e);
+    }
+  }
+
+  @Override
+  public Iterator<Row> iterator() {
+    return iterator;
+  }
+}
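A usage sketch mirroring the JSON case, again assuming a no-argument ParseOptions constructor:

  import org.apache.ambari.view.hive.client.Row;
  import java.io.IOException;
  import java.io.StringReader;
  import java.util.Iterator;

  static void demo() throws IOException {
    String xml = "<table>"
        + "<row><col name=\"name\">alice</col><col name=\"age\">30</col></row>"
        + "</table>";
    XMLParser parser = new XMLParser(new StringReader(xml), new ParseOptions());
    Row header = parser.extractHeader(); // [name, age], taken from the first row's col names
    Iterator<Row> rows = parser.iterator();
    while (rows.hasNext()) {
      Row row = rows.next();
    }
    parser.close();
  }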

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/DeleteQueryInput.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/DeleteQueryInput.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/DeleteQueryInput.java
new file mode 100644
index 0000000..1fe30fd
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/DeleteQueryInput.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.query;
+
+public class DeleteQueryInput {
+  private String database;
+  private String table;
+
+  public DeleteQueryInput() {
+  }
+
+  public DeleteQueryInput(String database, String table) {
+    this.database = database;
+    this.table = table;
+  }
+
+  public String getDatabase() {
+    return database;
+  }
+
+  public void setDatabase(String database) {
+    this.database = database;
+  }
+
+  public String getTable() {
+    return table;
+  }
+
+  public void setTable(String table) {
+    this.table = table;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/InsertFromQueryInput.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/InsertFromQueryInput.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/InsertFromQueryInput.java
new file mode 100644
index 0000000..5befc51
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/InsertFromQueryInput.java
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.query;
+
+public class InsertFromQueryInput {
+  private String fromDatabase;
+  private String fromTable;
+  private String toDatabase;
+  private String toTable;
+
+  public InsertFromQueryInput() {
+  }
+
+  public InsertFromQueryInput(String fromDatabase, String fromTable, String toDatabase, String toTable) {
+    this.fromDatabase = fromDatabase;
+    this.fromTable = fromTable;
+    this.toDatabase = toDatabase;
+    this.toTable = toTable;
+  }
+
+  public String getFromDatabase() {
+    return fromDatabase;
+  }
+
+  public void setFromDatabase(String fromDatabase) {
+    this.fromDatabase = fromDatabase;
+  }
+
+  public String getFromTable() {
+    return fromTable;
+  }
+
+  public void setFromTable(String fromTable) {
+    this.fromTable = fromTable;
+  }
+
+  public String getToDatabase() {
+    return toDatabase;
+  }
+
+  public void setToDatabase(String toDatabase) {
+    this.toDatabase = toDatabase;
+  }
+
+  public String getToTable() {
+    return toTable;
+  }
+
+  public void setToTable(String toTable) {
+    this.toTable = toTable;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/LoadQueryInput.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/LoadQueryInput.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/LoadQueryInput.java
new file mode 100644
index 0000000..122b754
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/LoadQueryInput.java
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.query;
+
+/**
+ * input for QueryGenerator for generating Load From Query
+ */
+public class LoadQueryInput {
+  private String hdfsFilePath;
+  private String databaseName;
+  private String tableName;
+
+  public LoadQueryInput(String hdfsFilePath, String databaseName, String tableName) {
+    this.hdfsFilePath = hdfsFilePath;
+    this.databaseName = databaseName;
+    this.tableName = tableName;
+  }
+
+  public String getHdfsFilePath() {
+    return hdfsFilePath;
+  }
+
+  public void setHdfsFilePath(String hdfsFilePath) {
+    this.hdfsFilePath = hdfsFilePath;
+  }
+
+  public String getDatabaseName() {
+    return databaseName;
+  }
+
+  public void setDatabaseName(String databaseName) {
+    this.databaseName = databaseName;
+  }
+
+  public String getTableName() {
+    return tableName;
+  }
+
+  public void setTableName(String tableName) {
+    this.tableName = tableName;
+  }
+
+  @Override
+  public String toString() {
+    return "LoadQueryInput{" +
+            "hdfsFilePath='" + hdfsFilePath + '\'' +
+            ", databaseName='" + databaseName + '\'' +
+            ", tableName='" + tableName + '\'' +
+            '}';
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/QueryGenerator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/QueryGenerator.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/QueryGenerator.java
new file mode 100644
index 0000000..6bab229
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/QueryGenerator.java
@@ -0,0 +1,98 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.query;
+
+import org.apache.ambari.view.hive.client.ColumnDescription;
+import org.apache.ambari.view.hive.resources.uploads.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+
+/**
+ * generates the sql query from given data
+ */
+public class QueryGenerator {
+  protected final static Logger LOG =
+          LoggerFactory.getLogger(QueryGenerator.class);
+
+  public String generateCreateQuery(TableInfo tableInfo) {
+    String tableName = tableInfo.getTableName();
+    List<ColumnDescriptionImpl> cdList = tableInfo.getColumns();
+
+    StringBuilder query = new StringBuilder();
+    query.append("create table " + tableName + " (");
+    Collections.sort(cdList, new Comparator<ColumnDescription>() {
+      @Override
+      public int compare(ColumnDescription o1, ColumnDescription o2) {
+        return o1.getPosition() - o2.getPosition();
+      }
+    });
+
+    boolean first = true;
+    for (ColumnDescriptionImpl cd : cdList) {
+      if (first) {
+        first = false;
+      } else {
+        query.append(", ");
+      }
+
+      query.append(cd.getName()).append(" ").append(cd.getType());
+      if (cd.getPrecision() != null) {
+        query.append("(").append(cd.getPrecision());
+        if (cd.getScale() != null) {
+          query.append(",").append(cd.getScale());
+        }
+        query.append(")");
+      }
+
+    }
+
+    query.append(")");
+
+    if (tableInfo.getHiveFileType() == HiveFileType.TEXTFILE)
+      query.append(" ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE;");
+    else
+      query.append(" STORED AS " + tableInfo.getHiveFileType() + ";");
+
+    String queryString = query.toString();
+    LOG.info("Query : {}", queryString);
+    return queryString;
+  }
+
+  public String generateInsertFromQuery(InsertFromQueryInput ifqi) {
+    String insertQuery = "insert into table " + ifqi.getToDatabase() + "." + ifqi.getToTable() + " select * from " + ifqi.getFromDatabase() + "." + ifqi.getFromTable();
+    LOG.info("Insert Query : {}", insertQuery);
+    return insertQuery;
+  }
+
+  public String generateDropTableQuery(DeleteQueryInput deleteQueryInput) {
+    String dropQuery = "drop table " + deleteQueryInput.getDatabase() + "." + deleteQueryInput.getTable();
+    LOG.info("Drop Query : {}", dropQuery);
+    return dropQuery;
+  }
+
+  public String generateLoadQuery(LoadQueryInput loadQueryInput) {
+    String loadFromQuery = "LOAD DATA INPATH '"  + loadQueryInput.getHdfsFilePath() + "' INTO TABLE " + loadQueryInput.getDatabaseName() + "." + loadQueryInput.getTableName() + ";" ;
+    LOG.info("Load From Query : {}", loadFromQuery);
+    return loadFromQuery;
+  }
+}
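For reference, the statements this generator produces for a hypothetical upload into default.people staged through a temporary table (the ColumnDescriptionImpl setup is elided; note that generateCreateQuery emits only the bare table name, so it must run against the intended database):

  QueryGenerator qg = new QueryGenerator();

  // with columns (name STRING, age INT) and HiveFileType.ORC in the TableInfo:
  // qg.generateCreateQuery(tableInfo)
  //   -> create table people (name STRING, age INT) STORED AS ORC;

  qg.generateLoadQuery(new LoadQueryInput("/user/admin/people.csv", "default", "tmp_people"));
  //   -> LOAD DATA INPATH '/user/admin/people.csv' INTO TABLE default.tmp_people;

  qg.generateInsertFromQuery(new InsertFromQueryInput("default", "tmp_people", "default", "people"));
  //   -> insert into table default.people select * from default.tmp_people

  qg.generateDropTableQuery(new DeleteQueryInput("default", "tmp_people"));
  //   -> drop table default.tmp_people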

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/TableInfo.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/TableInfo.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/TableInfo.java
new file mode 100644
index 0000000..903e5b0
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/TableInfo.java
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.query;
+
+import org.apache.ambari.view.hive.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive.resources.uploads.HiveFileType;
+
+import java.util.List;
+
+/**
+ * used as input in Query generation
+ */
+public class TableInfo {
+  private String tableName;
+  private String databaseName;
+  private List<ColumnDescriptionImpl> columns;
+  private HiveFileType hiveFileType;
+
+  public String getTableName() {
+    return tableName;
+  }
+
+  public void setTableName(String tableName) {
+    this.tableName = tableName;
+  }
+
+  public String getDatabaseName() {
+    return databaseName;
+  }
+
+  public void setDatabaseName(String databaseName) {
+    this.databaseName = databaseName;
+  }
+
+  public List<ColumnDescriptionImpl> getColumns() {
+    return columns;
+  }
+
+  public void setColumns(List<ColumnDescriptionImpl> columns) {
+    this.columns = columns;
+  }
+
+  public HiveFileType getHiveFileType() {
+    return hiveFileType;
+  }
+
+  public void setHiveFileType(HiveFileType hiveFileType) {
+    this.hiveFileType = hiveFileType;
+  }
+
+  public TableInfo(String databaseName, String tableName, List<ColumnDescriptionImpl> columns, HiveFileType hiveFileType) {
+    this.tableName = tableName;
+    this.databaseName = databaseName;
+    this.columns = columns;
+    this.hiveFileType = hiveFileType;
+  }
+
+  public TableInfo(TableInfo tableInfo) {
+    this.tableName = tableInfo.tableName;
+    this.databaseName = tableInfo.databaseName;
+    this.columns = tableInfo.columns;
+    this.hiveFileType = tableInfo.hiveFileType;
+  }
+
+  public TableInfo() {
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/resources/ui/hive-web/app/adapters/upload-table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/adapters/upload-table.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/adapters/upload-table.js
index 6a9c54b..ef4df43 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/adapters/upload-table.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/adapters/upload-table.js
@@ -22,12 +22,6 @@ import application from './application';
 import FileUploader from './file-upload';
 
 export default application.extend({
-  hdrs : function(){
-    console.log("inside hdrs : headers : ",this.get('headers'));
-    var h = Ember.$.extend(true, {},this.get('headers'));
-    delete h['Content-Type'];
-    return h;
-  }.property('headers'),
 
   buildUploadURL: function (path) {
     return this.buildURL() + "/resources/upload/" + path;
@@ -40,9 +34,10 @@ export default application.extend({
     console.log("uploader : extras : ", extras);
     console.log("uploader : files : ", files);
 
-    console.log("hdrs : ", this.get('hdrs'));
+    var hdrs = Ember.$.extend(true, {},this.get('headers'));
+    delete hdrs['Content-Type'];
     var uploader = FileUploader.create({
-      headers: this.get('hdrs'),
+      headers: hdrs,
       url: uploadUrl
     });
 
@@ -53,24 +48,46 @@ export default application.extend({
   },
 
   createTable: function (tableData) {
-    var _this = this;
-    var postHeader = JSON.parse(JSON.stringify(this.get('headers')));
-    console.log("headers postHeadesfsfdfsfsfss : : " , postHeader);
-    return Ember.$.ajax(      {
-        url :  this.buildUploadURL("createTable"),
-        type : 'post',
-        data: JSON.stringify(tableData),
-        headers: this.get('headers'),
-        dataType : 'json'
-      }
-    );
+    console.log("creating table with data :", tableData);
+    return this.doPost("createTable",tableData);
   },
 
-  getCreateTableResult : function(jobId){
-    return Ember.$.ajax(this.buildUploadURL("createTable/status"),{
-      data : {"jobId":jobId},
-      type: "get",
-      headers: this.get('headers')
-    });
+  insertIntoTable: function(insertData){
+    console.log("inserting into table with data : ", insertData);
+    return this.doPost("insertIntoTable",insertData);
+  },
+
+  deleteTable: function(deleteData){
+    console.log("delete table with info : ", deleteData);
+    return this.doPost("deleteTable",deleteData);
+  },
+
+  doPost : function(path, inputData){
+    var self = this;
+    return new Ember.RSVP.Promise(function(resolve, reject){
+      Ember.$.ajax({
+        url : self.buildUploadURL(path),
+        type : 'post',
+        data : JSON.stringify(inputData),
+        headers : self.get('headers'),
+        dataType : 'json'
+      }).done(function(data) {
+        console.log("inside done : data : ", data);
+        resolve(data);
+      }).fail(function(error) {
+        console.log("inside fail error : ", error);
+        reject(error);
+      });
+    });
+  },
+
+  previewFromHDFS : function(previewFromHdfsData){
+    console.log("preview from hdfs with info : ", previewFromHdfsData);
+    return this.doPost("previewFromHdfs",previewFromHdfsData)
+  },
+
+  uploadFromHDFS : function(uploadFromHdfsData){
+    console.log("upload from hdfs with info : ", uploadFromHdfsData);
+    return this.doPost("uploadFromHDFS",uploadFromHdfsData)
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/resources/ui/hive-web/app/components/input-header.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/components/input-header.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/components/input-header.js
new file mode 100644
index 0000000..7ff5bf7
--- /dev/null
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/components/input-header.js
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+  tagName: '',
+  dataTypes: null,
+  column: null,
+  precisionChanged: Ember.observer('column.precision', function () {
+    var col = this.get('column');
+    if (typeof col.precision !== 'number') {
+      Ember.set(col, 'precision', Number(col.precision));
+    }
+  }),
+
+  scaleChanged: Ember.observer('column.scale', function () {
+    var col = this.get('column');
+    if (typeof col.scale !== 'number') {
+      Ember.set(col, 'scale', Number(col.scale));
+    }
+  }),
+
+  typeChanged: Ember.observer('column.type', function () {
+    var col = this.get('column');
+
+    var type = col.type;
+    if (type != "DECIMAL") {
+      Ember.set(col, 'scale'); // clear scale; only DECIMAL takes a scale
+    }
+
+    if (type != "VARCHAR" && type != "CHAR" && type != "DECIMAL") {
+      Ember.set(col, 'precision'); // clear precision; only VARCHAR, CHAR and DECIMAL take one
+    }
+  }),
+
+  noPrecision: Ember.computed('column.type', function () {
+    var type = this.get('column').type;
+    return !(type == "VARCHAR" || type == "CHAR" || type == "DECIMAL");
+  }),
+
+  noScale: Ember.computed('column.type', function () {
+    return this.get('column').type != "DECIMAL";
+  })
+
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/resources/ui/hive-web/app/components/radio-button.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/components/radio-button.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/components/radio-button.js
new file mode 100644
index 0000000..a07caaf
--- /dev/null
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/components/radio-button.js
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+  tagName: 'input',
+  type: 'radio',
+  attributeBindings: ['type', 'htmlChecked:checked', 'value', 'name', 'disabled'],
+
+  htmlChecked: function() {
+    return this.get('value') === this.get('checked');
+  }.property('value', 'checked'),
+
+  change: function() {
+    this.set('checked', this.get('value'));
+  },
+
+  _updateElementValue: function() {
+    Ember.run.next(this, function() {
+      this.$().prop('checked', this.get('htmlChecked'));
+    });
+  }.observes('htmlChecked')
+});
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/upload-table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/upload-table.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/upload-table.js
index ab7b934..5085cc5 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/upload-table.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/upload-table.js
@@ -22,35 +22,67 @@ import constants from 'hive/utils/constants';
 
 
 export default Ember.Controller.extend({
+  isLocalUpload : Ember.computed.equal("uploadSource","local"),
+  uploadSource : "local",
+  hdfsPath : "",
+  jobService: Ember.inject.service(constants.namingConventions.job),
   notifyService: Ember.inject.service(constants.namingConventions.notify),
-  needs : ['databases'],
-  showErrors : false,
+  needs: ['databases'],
+  showErrors: false,
   uploader: Uploader.create(),
   baseUrl: "/resources/upload",
-  isFirstRowHeader: null, // is first row  header
+  isFirstRowHeader: true, // is the first row a header
   header: null,  // header received from server
   files: null, // files that need to be uploaded only file[0] is relevant
   firstRow: [], // the actual first row of the table.
   rows: null,  // preview rows received from server
-  databaseName:null,
-  selectedDatabase : null,
-  filePath : null,
+  databaseName: null,
+  selectedDatabase: null,
+  filePath: null,
   tableName: null,
-  dataTypes : [
-     "TINYINT", //
-     "SMALLINT", //
-     "INT", //
-     "BIGINT", //
-     "BOOLEAN", //
-     "FLOAT", //
-     "DOUBLE", //
-     "STRING", //
-     "BINARY", // -- (Note: Available in Hive 0.8.0 and later)
-     "TIMESTAMP", // -- (Note: Available in Hive 0.8.0 and later)
-     "DECIMAL", // -- (Note: Available in Hive 0.11.0 and later)
-     "DATE", // -- (Note: Available in Hive 0.12.0 and later)
-     "VARCHAR", // -- (Note: Available in Hive 0.12.0 and later)
-     "CHAR" // -- (Note: Available in Hive 0.13.0 and later)
+  uploadProgressInfos : [],
+  onChangeUploadSource : function(){
+    this.clearFields();
+  }.observes("uploadSource"),
+  uploadProgressInfo : Ember.computed("uploadProgressInfos.[]", function(){
+    var info = this.get('uploadProgressInfos').join('');
+    return new Ember.Handlebars.SafeString(info);
+  }),
+  inputFileTypes :[
+    {id : "CSV", name : "CSV"},
+    {id : "JSON", name : "JSON"},
+    {id : "XML", name : "XML"}
+  ],
+  inputFileType : {id : "CSV", name : "CSV"},
+  inputFileTypeCSV : Ember.computed.equal('inputFileType.id',"CSV"),
+  fileTypes:[
+    "SEQUENCEFILE",
+    "TEXTFILE",
+    "RCFILE",
+    "ORC",
+    "PARQUET",
+    "AVRO",
+    "INPUTFORMAT"
+  ],
+  selectedFileType: "ORC",
+  dataTypes: [
+    "TINYINT", //
+    "SMALLINT", //
+    "INT", //
+    "BIGINT", //
+    "BOOLEAN", //
+    "FLOAT", //
+    "DOUBLE", //
+    "STRING", //
+    "BINARY", // -- (Note: Available in Hive 0.8.0 and later)
+    "TIMESTAMP", // -- (Note: Available in Hive 0.8.0 and later)
+    "DECIMAL", // -- (Note: Available in Hive 0.11.0 and later)
+    "DATE", // -- (Note: Available in Hive 0.12.0 and later)
+    "VARCHAR", // -- (Note: Available in Hive 0.12.0 and later)
+    "CHAR" // -- (Note: Available in Hive 0.13.0 and later)
   ],
   isFirstRowHeaderDidChange: function () {
     console.log("inside onFirstRowHeader : isFirstRowHeader : " + this.get('isFirstRowHeader'));
@@ -59,7 +91,7 @@ export default Ember.Controller.extend({
         if (this.get('rows')) {
           this.get('rows').unshiftObject({row: this.get('firstRow')});
         }
-      } else {
+      } else if( this.get('header') ) { // headers are available
         // take first row of
         this.get('header').forEach(function (item, index) {
           console.log("item : ", item);
@@ -74,23 +106,43 @@ export default Ember.Controller.extend({
     }
   }.observes('isFirstRowHeader'),
 
-  uploadForPreview: function (files) {
-    console.log("uploaderForPreview called.");
-    return this.get('uploader').uploadFiles('preview', files);
+  popUploadProgressInfos : function(){
+    this.get('uploadProgressInfos').popObject();
+  },
+
+  pushUploadProgressInfos : function(info){
+    this.get('uploadProgressInfos').pushObject(info);
+  },
+
+  clearUploadProgressModal : function(){
+    // pop until empty: a for-loop bounded by the shrinking length would stop halfway
+    while (this.get('uploadProgressInfos').length > 0) {
+      this.popUploadProgressInfos();
+    }
+  },
+
+  hideUploadModal : function(){
+    console.log("hiding the modal ....");
+    this.clearUploadProgressModal();
+    Ember.$("#uploadProgressModal").modal("hide");
+  },
+
+  showUploadModal : function(){
+    Ember.$("#uploadProgressModal").modal("show");
   },
 
   clearFields: function () {
+    this.set("hdfsPath");
     this.set("header");
     this.set("rows");
     this.set("error");
-    this.set('isFirstRowHeader');
+    this.set('isFirstRowHeader',true);
     this.set('files');
     this.set("firstRow");
     this.set("selectedDatabase");
     this.set("databaseName");
     this.set("filePath");
     this.set('tableName');
-
+    this.clearUploadProgressModal();
     this.printValues();
   },
 
@@ -103,65 +155,397 @@ export default Ember.Controller.extend({
     console.log("files : ", this.get('files'));
     console.log("firstRow : ", this.get('firstRow'));
   },
+
+  generateTempTableName : function(){
+    var text = "";
+    var possible = "abcdefghijklmnopqrstuvwxyz";
+
+    for( var i=0; i < 30; i++ )
+      text += possible.charAt(Math.floor(Math.random() * possible.length));
+
+    return text;
+  },
+
+  waitForJobStatus: function (jobId, resolve, reject) {
+    console.log("finding status of job: ", jobId);
+    var self = this;
+    var fetchJobPromise = this.get('jobService').fetchJobStatus(jobId);
+
+      fetchJobPromise.then(function (data) {
+        console.log("waitForJobStatus : data : ", data);
+        var status = data.jobStatus;
+        if (status == "Succeeded") {
+          console.log("resolving waitForJobStatus with : " , status);
+          resolve(status);
+        } else if (status == "Canceled" || status == "Closed" || status == "Error") {
+          console.log("rejecting waitForJobStatus with : " + status);
+          reject(new Error(status));
+        } else {
+          console.log("retrying waitForJobStatus : ");
+          self.waitForJobStatus(jobId, resolve, reject); // poll again immediately until a terminal status arrives
+        }
+      }, function (error) {
+        console.log("rejecting waitForJobStatus with : " + error);
+        reject(error);
+    });
+  },
+
+  uploadForPreview: function (files) {
+    console.log("uploaderForPreview called.");
+    var self = this;
+    return this.get('uploader').uploadFiles('preview', files, {"isFirstRowHeader" : self.get("isFirstRowHeader"), "inputFileType" : self.get("inputFileType").id});
+  },
+
+  uploadForPreviewFromHDFS : function(){
+    console.log("uploadForPreviewFromHDFS called.");
+    return this.get('uploader').previewFromHDFS({"isFirstRowHeader" : this.get("isFirstRowHeader"),"inputFileType" : this.get("inputFileType").id , "hdfsPath" : this.get("hdfsPath") });
+  },
+
+  generatePreview : function(files){
+    var self = this;
+    var promise = null;
+    if(this.get('isLocalUpload')){
+      promise = this.uploadForPreview(files);
+    }else{
+      promise = this.uploadForPreviewFromHDFS();
+    }
+
+    return promise.then(function (data) {
+        self.onGeneratePreviewSuccess(data);
+    }, function (error) {
+        self.onGeneratePreviewFailure(error);
+    });
+  },
+
+  waitForGeneratingPreview: function () {
+    console.log("waitForGeneratingPreview");
+    this.showUploadModal();
+    this.pushUploadProgressInfos("<li> Generating Preview .... </li>")
+  },
+
   previewTable: function (data) {
     console.log('inside previewTable');
     this.set("header", data.header);
-    this.set("rows", data.rows);
     this.set("firstRow", data.rows[0].row);
     console.log("firstRow : ", this.get('firstRow'));
     this.set('isFirstRowHeader', data.isFirstRowHeader);
+    this.set('tableName',data.tableName);
+    if (data.isFirstRowHeader == true) {
+      data.rows = data.rows.slice(1); // drop the header row from the preview data
+    }
+    this.set("rows", data.rows);
+  },
+
+  onGeneratePreviewSuccess: function (data) {
+    console.log("onGeneratePreviewSuccess");
+    this.hideUploadModal();
+    this.previewTable(data);
+  },
+
+  onGeneratePreviewFailure: function (error) {
+    console.log("onGeneratePreviewFailure");
+    this.hideUploadModal();
+    this.setError(error);
+  },
+
+  createActualTable : function(){
+    console.log("createActualTable");
+    return this.createTable();
   },
 
-  fetchCreateTableStatus: function (jobId, resolve, reject) {
+  waitForCreateActualTable: function (jobId) {
+    console.log("waitForCreateActualTable");
+    this.pushUploadProgressInfos("<li>Creating actual table.... </li>");
     var self = this;
-    this.get('uploader').getCreateTableResult(jobId).then(function (data) {
-      console.log("fetchCreateTableStatus : data : ", data);
-      var status = data.status;
-      if (status == "Succeeded") {
-        console.log("resolving fetchCreateTableStatus with : " + data);
-        resolve(status);
-      } else if (status == "Canceled" || status == "Closed" || status == "Error") {
-        console.log("rejecting fetchCreateTableStatus with : " + status);
-        reject(new Error(status));
-      } else {
-        console.log("retrying fetchCreateTableStatus : ");
-        self.fetchCreateTableStatus(jobId, resolve, reject);
-      }
-    }, function (error) {
-      console.log("rejecting fetchCreateTableStatus with : " + error);
-      reject(error);
-    })
+    var p = new Ember.RSVP.Promise(function (resolve, reject) {
+      self.waitForJobStatus(jobId, resolve, reject);
+    });
+
+    return p;
+  },
+
+  onCreateActualTableSuccess : function(){
+    console.log("onCreateTableSuccess");
+    this.popUploadProgressInfos();
+    this.pushUploadProgressInfos("<li> Successfully created actual table. </li>");
+  },
+
+  onCreateActualTableFailure : function(error){
+    console.log("onCreateActualTableFailure");
+    this.setError(error);
   },
 
-  waitForResult: function (jobId) {
+  createTempTable : function(){
+    console.log("createTempTable");
+    var tempTableName = this.generateTempTableName();
+    this.set('tempTableName',tempTableName);
+    return this.get('uploader').createTable({
+      "isFirstRowHeader": this.get("isFirstRowHeader"),
+      "header": this.get("header"),
+      "tableName": tempTableName,
+      "databaseName": this.get('databaseName'),
+      "fileType":"TEXTFILE"
+    });
+  },
+
+  waitForCreateTempTable: function (jobId) {
+    console.log("waitForCreateTempTable");
+    this.pushUploadProgressInfos("<li>Creating temporary table.... </li>");
     var self = this;
-    return new Ember.RSVP.Promise(function (resolve, reject) {
-      self.fetchCreateTableStatus(jobId,resolve,reject);
+    var p = new Ember.RSVP.Promise(function (resolve, reject) {
+      self.waitForJobStatus(jobId, resolve, reject);
     });
+
+    return p;
+  },
+
+  onCreateTempTableSuccess : function(){
+    console.log("onCreateTempTableSuccess");
+    this.popUploadProgressInfos();
+    this.pushUploadProgressInfos("<li> Successfully created temporary table. </li>");
+  },
+
+  onCreateTempTableFailure : function(error){
+    console.log("onCreateTempTableFailure");
+    this.setError(error);
+  },
+
+  uploadFile : function(){
+    console.log("uploadFile");
+    if( this.get("isLocalUpload")){
+      this.pushUploadProgressInfos("<li>Uploading file .... </li>");
+      return this.uploadTable();
+    }else{
+      return this.uploadTableFromHdfs();
+    }
+  },
+
+  waitForUploadingFile: function (data) {
+    console.log("waitForUploadingFile");
+    if( data.jobId ){
+      this.pushUploadProgressInfos("<li>Uploading file .... </li>");
+      var self = this;
+      var p = new Ember.RSVP.Promise(function (resolve, reject) {
+        self.waitForJobStatus(data.jobId, resolve, reject);
+      });
+      return p;
+    }else{
+      return  Ember.RSVP.Promise.resolve(data);
+    }
+  },
+
+  onUploadingFileSuccess: function () {
+    console.log("onUploadingFileSuccess");
+    this.popUploadProgressInfos();
+    this.pushUploadProgressInfos("<li> Successfully uploaded file. </li>");
+  },
+
+  onUploadingFileFailure: function (error) {
+    console.log("onUploadingFileFailure");
+    this.setError(error);
+  },
+
+  insertIntoTable : function(){
+    console.log("insertIntoTable");
+
+    return this.get('uploader').insertIntoTable({
+      "fromDatabase":  this.get("databaseName"),
+      "fromTable": this.get("tempTableName"),
+      "toDatabase": this.get("databaseName"),
+      "toTable": this.get("tableName")
+    });
+  },
+
+  waitForInsertIntoTable: function (jobId) {
+    console.log("waitForInsertIntoTable");
+    this.pushUploadProgressInfos("<li>Inserting rows from temporary table to actual table .... </li>");
+    var self = this;
+    var p = new Ember.RSVP.Promise(function (resolve, reject) {
+      self.waitForJobStatus(jobId, resolve, reject);
+    });
+
+    return p;
+  },
+
+  onInsertIntoTableSuccess : function(){
+    console.log("onInsertIntoTableSuccess");
+    this.popUploadProgressInfos();
+    this.pushUploadProgressInfos("<li>Successfully inserted rows from temporary table to actual table. </li>");
+  },
+
+  onInsertIntoTableFailure : function(error){
+    console.log("onInsertIntoTableFailure");
+    this.setError(error);
+  },
+
+  deleteTempTable : function(){
+    console.log("deleteTempTable");
+
+    return this.get('uploader').deleteTable({
+      "database":  this.get("databaseName"),
+      "table": this.get("tempTableName")
+    });
+  },
+
+  waitForDeleteTempTable: function (jobId) {
+    console.log("waitForDeleteTempTable");
+    this.pushUploadProgressInfos("<li>Deleting temporary table .... </li>");
+    var self = this;
+    var p = new Ember.RSVP.Promise(function (resolve, reject) {
+      self.waitForJobStatus(jobId, resolve, reject);
+    });
+
+    return p;
+  },
+
+  onDeleteTempTableSuccess : function(){
+    console.log("onDeleteTempTableSuccess");
+    this.popUploadProgressInfos();
+    this.pushUploadProgressInfos("<li>Successfully inserted row. </li>");
+    this.onUploadSuccessfull();
+  },
+
+  onDeleteTempTableFailure : function(error){
+    console.log("onDeleteTempTableFailure");
+    this.setError(error);
+  },
+
+  createTableAndUploadFile : function(){
+    var self = this;
+    self.setError();
+    self.showUploadModal();
+    self.createActualTable()
+      .then(function(data){
+        console.log("1. received data : ", data);
+        return self.waitForCreateActualTable(data.jobId);
+      },function(error){
+        self.onCreateActualTableFailure(error);
+        console.log("Error occurred: ", error);
+        throw error;
+      })
+      .then(function(data){
+        console.log("2. received data : ", data);
+        self.onCreateActualTableSuccess(data);
+        return self.createTempTable(data);
+      },function(error){
+        if(!self.get('error')){
+          self.onCreateActualTableFailure(new Error("Server job for creation of actual table failed."));
+          console.log("Error occurred: ", error);
+        }
+        throw error;
+      })
+      .then(function(data){
+        console.log("3. received data : ", data);
+        return self.waitForCreateTempTable(data.jobId);
+      },function(error){
+        if(!self.get('error')){
+          self.onCreateTempTableFailure(error);
+          console.log("Error occurred: ", error);
+        }
+        throw error;
+      })
+      .then(function(data){
+        console.log("4. received data : ", data);
+        self.onCreateTempTableSuccess(data);
+        return self.uploadFile(data);
+      },function(error){
+        if(!self.get('error')){
+          self.onCreateTempTableFailure(new Error("Server job for creation of temporary table failed."));
+          console.log("Error occurred: ", error);
+        }
+        throw error;
+      }).then(function(data){
+        console.log("4.5 received data : ", data);
+        return self.waitForUploadingFile(data);
+      },function(error){
+        if(!self.get('error')){
+          self.onUploadingFileFailure(error);
+          console.log("Error occurred: ", error);
+        }
+        throw error;
+      })
+      .then(function(data){
+        console.log("5. received data : ", data);
+        self.onUploadingFileSuccess(data);
+        return self.insertIntoTable(data);
+      },function(error){
+        if(!self.get('error')){
+          self.onUploadingFileFailure(new Error("Server job for upload of file failed."));
+          console.log("Error occurred: ", error);
+        }
+        throw error;
+      })
+      .then(function(data){
+        console.log("6. received data : ", data);
+        return self.waitForInsertIntoTable(data.jobId);
+      },function(error){
+        if(!self.get('error')){
+          self.onInsertIntoTableFailure(error);
+          console.log("Error occurred: ", error);
+        }
+        throw error;
+      })
+      .then(function(data){
+        console.log("7. received data : ", data);
+        self.onInsertIntoTableSuccess(data);
+        return self.deleteTempTable(data);
+      },function(error){
+        if(!self.get('error')){
+          self.onInsertIntoTableFailure(new Error("Server job for insert from temporary to actual table failed."));
+          console.log("Error occurred: ", error);
+        }
+        throw error;
+      })
+      .then(function(data){
+        console.log("8. received data : ", data);
+        return self.waitForDeleteTempTable(data.jobId);
+      },function(error){
+        if(!self.get('error')){
+          self.onDeleteTempTableFailure(error);
+          console.log("Error occurred: ", error);
+        }
+        throw error;
+      })
+      .then(function(data){
+        console.log("9. received data : ", data);
+        self.onDeleteTempTableSuccess(data);
+      },function(error){
+        if(!self.get('error')){
+          self.onDeleteTempTableFailure(new Error("Server job for deleting temporary table failed."));
+          console.log("Error occurred: ", error);
+        }
+        throw error;
+      }).finally(function(){
+        console.log("finally hide the modal always");
+        self.hideUploadModal();
+      });
   },
 
   createTable: function () {
-    var headers = JSON.stringify(this.get('header'));
+    console.log("table headers : ", this.get('header'));
+    var headers = this.get('header');
 
     var selectedDatabase = this.get('selectedDatabase');
-    if( null == selectedDatabase || typeof selectedDatabase === 'undefined'){
-      throw new Error(constants.hive.errors.emptyDatabase);
+    if (null == selectedDatabase || typeof selectedDatabase === 'undefined') {
+      throw new Error(Ember.I18n.t('hive.errors.emptyDatabase'));
     }
 
-    this.set('databaseName',this.get('selectedDatabase').get('name'));
+    this.set('databaseName', this.get('selectedDatabase').get('name'));
     var databaseName = this.get('databaseName');
     var tableName = this.get('tableName');
     var isFirstRowHeader = this.get('isFirstRowHeader');
-    console.log("databaseName : " , databaseName, ", tableName : ", tableName, ", isFirstRowHeader : " , isFirstRowHeader , ", headers : ", headers);
+    var filetype = this.get("selectedFileType");
 
-    if( null == databaseName || typeof databaseName === 'undefined'){
-      throw new Error(constants.hive.errors.emptyDatabase);
+    if (null == databaseName || typeof databaseName === 'undefined' || databaseName == '') {
+      throw new Error(Ember.I18n.t('hive.errors.emptyDatabase'));
     }
-    if( null == tableName || typeof tableName === 'undefined'){
-      throw new Error(constants.hive.errors.emptyTableName);
+    if (null == tableName || typeof tableName === 'undefined' || tableName == '') {
+      throw new Error(Ember.I18n.t('hive.errors.emptyTableName'));
     }
-    if( null == isFirstRowHeader || typeof isFirstRowHeader === 'undefined'){
-      throw new Error(constants.hive.errors.emptyIsFirstRow);
+    if (null == isFirstRowHeader || typeof isFirstRowHeader === 'undefined') {
+      throw new Error(Ember.I18n.t('hive.errors.emptyIsFirstRow'));
     }
 
     this.validateColumns();
@@ -170,35 +554,54 @@ export default Ember.Controller.extend({
       "isFirstRowHeader": isFirstRowHeader,
       "header": headers,
       "tableName": tableName,
-      "databaseName": databaseName
+      "databaseName": databaseName,
+      "fileType":filetype
     });
   },
 
-  validateColumns: function(){
-    // TODO :check validation of columnames.
+  validateColumns: function () {
     // throw exception if invalid.
   },
-  setError: function(error){
-    this.set('error',JSON.stringify(error));
-    console.log("upload table error : ",error);
-    this.get('notifyService').error(error);
+  setError: function (error) {
+    if(error){
+      console.log("upload table error : ", error);
+      this.set('error', JSON.stringify(error));
+      this.get('notifyService').error(error);
+    }else{
+      this.set("error");
+    }
   },
 
   previewError: function (error) {
     this.setError(error);
   },
 
+  uploadTableFromHdfs : function(){
+    console.log("uploadTableFromHdfs called.");
+    if(!(this.get("inputFileTypeCSV") == true && this.get("isFirstRowHeader") == false) ){
+      this.pushUploadProgressInfos("<li>Uploading file .... </li>");
+    }
+    return  this.get('uploader').uploadFromHDFS({
+        "isFirstRowHeader": this.get("isFirstRowHeader"),
+        "databaseName" :  this.get('databaseName'),
+        "tableName" : this.get("tempTableName"),
+        "inputFileType" : this.get("inputFileType").id,
+        "hdfsPath" : this.get("hdfsPath")
+      });
+  },
   uploadTable: function () {
     this.printValues();
     return this.get('uploader').uploadFiles('upload', this.get('files'), {
       "isFirstRowHeader": this.get("isFirstRowHeader"),
-      "filePath": this.get('filePath')
+      "databaseName" :  this.get('databaseName'),
+      "tableName" : this.get("tempTableName"),
+      "inputFileType" : this.get("inputFileType").id
     });
   },
 
   onUploadSuccessfull: function (data) {
     console.log("onUploadSuccessfull : ", data);
-    this.get('notifyService').success( "Uploaded Successfully", "Table " + this.get('tableName') + " created in database " + this.get("databaseName"));
+    this.get('notifyService').success("Uploaded Successfully", "Table " + this.get('tableName') + " created in database " + this.get("databaseName"));
     this.clearFields();
   },
 
@@ -206,9 +609,18 @@ export default Ember.Controller.extend({
     console.log("onUploadError : ", error);
     this.setError(error);
   },
-
+  showOrHide: function () {
+    if (this.get('show') == false) {
+      this.set("displayOption", "display:none");
+      this.set("showMoreOrLess", "Show More");
+    } else {
+      this.set("displayOption", "display:table-row");
+      this.set("showMoreOrLess", "Show Less");
+    }
+  },
+  displayOption: "display:none",
   actions: {
-    toggleErrors : function(){
+    toggleErrors: function () {
       this.toggleProperty('showErrors');
     },
     filesUploaded: function (files) {
@@ -222,40 +634,22 @@ export default Ember.Controller.extend({
       var tableName = name.substr(0, i);
       this.set('tableName', tableName);
       var self = this;
-      return this.uploadForPreview(files).then(function (data) {
-        self.previewTable(data);
-      }, function (error) {
-        self.previewError(error);
-      });
+      return this.generatePreview(files);
     },
-
-    createTableAndUploadFile: function () {
-      var self = this;
-
-      try {
-        this.createTable()
-          .then(function (jobData) {
-            console.log("jobData : ", jobData);
-            self.set('filePath', jobData.filePath);
-            self.waitForResult(jobData.jobId)
-              .then(function (successStatus) {
-                console.log("successStatus : ", successStatus);
-                self.uploadTable().then(function (operationData) {
-                  console.log("operation successfull operationData : ", operationData);
-                  self.onUploadSuccessfull(operationData);
-                }, function (error) {
-                  self.onUploadError(error);
-                });
-              }, function (error) {
-                self.onUploadError(error);
-              })
-          }, function (error) {
-            self.onUploadError(error);
-          })
+    previewFromHdfs : function(){
+      return this.generatePreview();
+    },
+    uploadTable : function(){
+      try{
+        this.createTableAndUploadFile();
       }catch(e){
-        self.onUploadError(e);
+        console.log("exception occured : ", e);
+        this.setError(e);
+        this.hideUploadModal();
       }
+    },
+    uploadFromHDFS : function(){
+      this.set("isLocalUpload",false);
     }
-
   }
 });

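The controller changes above drive the whole upload as one promise chain:
create the actual table, create a temporary TEXTFILE table, upload the file
into it, insert from the temporary table into the actual table, and finally
drop the temporary table, polling the server-side job status between steps.
A minimal, self-contained sketch of that polling pattern, using native
Promises and a stubbed status endpoint in place of Ember.RSVP and the jobs
service (the names here are illustrative, not part of the patch):

    // Poll until the job reaches a terminal state, mirroring waitForJobStatus.
    function waitForJob(fetchJobStatus, jobId) {
      return fetchJobStatus(jobId).then(function (data) {
        var status = data.jobStatus;
        if (status === "Succeeded") {
          return status;                          // terminal success
        }
        if (status === "Canceled" || status === "Closed" || status === "Error") {
          throw new Error(status);                // terminal failure
        }
        return waitForJob(fetchJobStatus, jobId); // still running: poll again
      });
    }

    // Stubbed status endpoint, for illustration only.
    var calls = 0;
    function fakeFetchJobStatus(jobId) {
      calls += 1;
      return Promise.resolve({ jobStatus: calls < 3 ? "Running" : "Succeeded" });
    }

    waitForJob(fakeFetchJobStatus, 42).then(
      function (status) { console.log("job finished:", status); },
      function (error) { console.log("job failed:", error.message); }
    );
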
http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js
index bd0e6e6..46847ae 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js
@@ -201,6 +201,7 @@ TRANSLATIONS = {
     collapse: 'Collapse message',
     previousPage: 'previous',
     uploadTable: 'Upload Table',
+    showPreview: 'Preview',
     nextPage: 'next',
     loadMore: 'Load more...',
     saveHdfs: 'Save to HDFS',

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/resources/ui/hive-web/app/services/job.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/services/job.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/services/job.js
index 3a1f82a..6cb4170 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/services/job.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/services/job.js
@@ -37,5 +37,20 @@ export default Ember.Service.extend({
          job.reload();
        }
     });
+  },
+
+  fetchJobStatus: function (jobId) {
+    console.log("finding status of job : ", jobId);
+    var url = this.container.lookup('adapter:application').buildURL();
+    url += "/jobs/" + jobId + "/status";
+
+    return Ember.$.ajax({
+      url: url,
+      type: 'GET',
+      headers: {
+        'X-Requested-By': 'ambari'
+      }
+    });
   }
 });

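A hypothetical usage sketch for the new service method (the job id and the
calling context are illustrative; the response shape matches the /status
endpoint requested above):

    // From an Ember controller that has the job service injected:
    this.get('jobService').fetchJobStatus(jobId).then(
      function (data) { console.log("job status:", data.jobStatus); },
      function (error) { console.log("status request failed:", error); }
    );
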
http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/resources/ui/hive-web/app/styles/app.scss
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/styles/app.scss b/contrib/views/hive/src/main/resources/ui/hive-web/app/styles/app.scss
index 31dafdd..716dc02 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/styles/app.scss
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/styles/app.scss
@@ -579,7 +579,7 @@ tree-view ul li {
   margin: 0 0 10px;
 }
 
-#query-results, #upload-table  {
+#query-results, #upload-table {
   .table {
     display: inline-block;
     overflow: auto;
@@ -589,11 +589,53 @@ tree-view ul li {
     margin-top: 10px;
   }
 
-  input {
+  input, .selectize-input {
     width: 300px;
   }
 }
 
+#upload-controls {
+  .table {
+    display: inline-block;
+    overflow: auto;
+  }
+
+  input, .selectize-input {
+    width: 94px;
+    font-size : smaller;
+  }
+
+  .hidden {
+    visibility:hidden;
+  }
+
+  .visible {
+    visibility:visible;
+  }
+
+  td {
+    padding-right : 5px;
+  }
+}
+
+
+.data-upload-form tr td {
+    padding-right : 5px;
+}
+
+td.data-upload-form-label {
+    width: 150px;
+    padding-left: 50px;
+}
+td.data-upload-form-field {
+    width: 350px;
+}
+
+#hdfs-param input {
+  width: 80%;
+  display: inline;
+}
+
 #visualization{
     .max-rows {
       float: right;
@@ -609,8 +651,13 @@ tree-view ul li {
 .mozBoxSizeFix {
   -moz-box-sizing: border-box;
 }
-
-#isFirstRowHeader {
+.show-data {
+    display : block;
+}
+.hide-data {
+    display : none;
+}
+.no-shadow {
   box-shadow: none;
   width: inherit;
 }
@@ -618,3 +665,7 @@ tree-view ul li {
 .widget-controls {
   padding-right: 10px;
 }
+
+table.no-border, table.no-border tr, table.no-border tr td {
+  border: none;
+}

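The new .show-data / .hide-data rules are plain display toggles meant to be
switched from the controller. A hedged sketch of driving them from a boolean
flag (the property name is illustrative, not part of the patch):

    // Pick the CSS class for the data area from the showErrors flag.
    visibilityClass: Ember.computed('showErrors', function () {
      return this.get('showErrors') ? 'hide-data' : 'show-data';
    })
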
http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/components/input-header.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/components/input-header.hbs b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/components/input-header.hbs
new file mode 100644
index 0000000..b2349fa
--- /dev/null
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/components/input-header.hbs
@@ -0,0 +1,20 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<td  {{bind-attr class="noPrecision:hidden:visible"}}>{{input type="number" placeholder="precision" class="form-control" value=column.precision }}</td>
+<td {{bind-attr class="noScale:hidden:visible"}}>{{input placeholder="scale" type="number" class="form-control" value=column.scale }}</td>
\ No newline at end of file
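
The component above toggles its precision and scale inputs via the
noPrecision and noScale flags. A hedged sketch of how such flags could be
computed from the column's selected type (the type name is an assumption for
illustration, not taken from the patch):

    // Only types that carry precision/scale (e.g. DECIMAL) show the inputs.
    noPrecision: Ember.computed('column.type', function () {
      return this.get('column.type') !== 'DECIMAL';
    }),
    noScale: Ember.computed('column.type', function () {
      return this.get('column.type') !== 'DECIMAL';
    })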