Posted to commits@ambari.apache.org by pa...@apache.org on 2016/02/26 10:59:20 UTC

[2/3] ambari git commit: AMBARI-15046. Hive view Upload table feature now supports upload from HDFS and Local. Supports JSON/XML/CSV as uploaded files, and ORC and all other Hive internal storage types. (Nitiraj Rathore via pallavkul)

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/PreviewData.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/PreviewData.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/PreviewData.java
new file mode 100644
index 0000000..8fcae95
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/PreviewData.java
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.parsers;
+
+import org.apache.ambari.view.hive.client.ColumnDescription;
+import org.apache.ambari.view.hive.client.Row;
+
+import java.util.List;
+
+/**
+ * Encapsulates the preview data produced by a parser.
+ */
+public class PreviewData {
+  private List<ColumnDescription> header;
+  private List<Row> previewRows;
+
+  public PreviewData() {
+  }
+
+  public PreviewData(List<ColumnDescription> header, List<Row> previewRows) {
+    this.header = header;
+    this.previewRows = previewRows;
+  }
+
+  public List<ColumnDescription> getHeader() {
+    return header;
+  }
+
+  public void setHeader(List<ColumnDescription> header) {
+    this.header = header;
+  }
+
+  public List<Row> getPreviewRows() {
+    return previewRows;
+  }
+
+  public void setPreviewRows(List<Row> previewRows) {
+    this.previewRows = previewRows;
+  }
+}

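A quick usage sketch (not part of this patch; PreviewBuilder and its caller are hypothetical): a caller holding a parser's row iterator and an already-built header list could populate the bean like this.

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.apache.ambari.view.hive.client.ColumnDescription;
import org.apache.ambari.view.hive.client.Row;
import org.apache.ambari.view.hive.resources.uploads.parsers.PreviewData;

public class PreviewBuilder {
  // Collects up to `limit` rows from a parser's iterator into a PreviewData bean,
  // together with a previously extracted header.
  public static PreviewData buildPreview(Iterator<Row> rows,
                                         List<ColumnDescription> header, int limit) {
    List<Row> previewRows = new ArrayList<>();
    while (previewRows.size() < limit && rows.hasNext()) {
      previewRows.add(rows.next());
    }
    return new PreviewData(header, previewRows);
  }
}
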
http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/RowIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/RowIterator.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/RowIterator.java
new file mode 100644
index 0000000..69fe864
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/RowIterator.java
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.parsers;
+
+import org.apache.ambari.view.hive.client.Row;
+
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.LinkedList;
+
+/**
+ * Converts the map of values produced by the JSON/XML parsers into ordered values in a Row.
+ * Takes a RowMapIterator as input.
+ */
+public class RowIterator implements Iterator<Row> {
+
+  private LinkedList<String> headers = null;
+  private RowMapIterator iterator;
+
+  /**
+   * Creates a row iterator for the map values in the given RowMapIterator
+   * and keeps the keys of the first map as the header.
+   * @param iterator the underlying map iterator
+   */
+  public RowIterator(RowMapIterator iterator) {
+    this.iterator = iterator;
+    LinkedHashMap<String, String> obj = iterator.peek();
+    if (null != obj)
+      headers = new LinkedList<>(obj.keySet());
+  }
+
+  @Override
+  public boolean hasNext() {
+    return iterator.hasNext();
+  }
+
+
+  @Override
+  public Row next() {
+    LinkedHashMap<String, String> r = this.iterator.next();
+    if (null == r) {
+      return null;
+    }
+
+    return convertToRow(r);
+  }
+
+  @Override
+  public void remove() {
+    iterator.remove();
+  }
+
+  /**
+   * @return the ordered collection of header strings
+   */
+  public LinkedList<String> extractHeaders() {
+    return headers;
+  }
+
+  /**
+   * Converts a map of column name to value into a Row, ordered by the header.
+   * @param lr the row as a map of column name to value
+   * @return the converted Row
+   */
+  private Row convertToRow(LinkedHashMap<String, String> lr) {
+    Object[] data = new Object[headers.size()];
+    int i = 0;
+    for (String cd : headers) {
+      String d = lr.get(cd);
+
+      if (d != null)
+        d = d.trim(); // trim to remove any newline etc., which TableDataReader uses as a row separator
+
+      data[i++] = d;
+    }
+
+    return new Row(data);
+  }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/RowMapIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/RowMapIterator.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/RowMapIterator.java
new file mode 100644
index 0000000..a0ff2d7
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/RowMapIterator.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.parsers;
+
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+
+/**
+ * Iterator that generates an ordered map of column names and values for each row read from streams like JSON and XML.
+ */
+public interface RowMapIterator extends Iterator<LinkedHashMap<String, String>> {
+  LinkedHashMap<String, String> peek();
+}

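To exercise the RowIterator/RowMapIterator contract without a real JSON/XML stream, a minimal in-memory implementation could look like this (an illustration only; no such class exists in the patch):

import java.util.LinkedHashMap;
import java.util.List;

import org.apache.ambari.view.hive.resources.uploads.parsers.RowMapIterator;

// Serves pre-built ordered maps the way the JSON/XML iterators below do from real streams.
public class ListRowMapIterator implements RowMapIterator {
  private final List<LinkedHashMap<String, String>> rows;
  private int pos = 0;

  public ListRowMapIterator(List<LinkedHashMap<String, String>> rows) {
    this.rows = rows;
  }

  @Override
  public LinkedHashMap<String, String> peek() {
    return pos < rows.size() ? rows.get(pos) : null; // look ahead without advancing
  }

  @Override
  public boolean hasNext() {
    return pos < rows.size();
  }

  @Override
  public LinkedHashMap<String, String> next() {
    return rows.get(pos++);
  }

  @Override
  public void remove() {
    throw new UnsupportedOperationException();
  }
}

Wrapping it as new RowIterator(new ListRowMapIterator(rows)) yields the keys of the first map from extractHeaders(), and each map's values, trimmed and ordered by that header, as Rows.
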
http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/CSVIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/CSVIterator.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/CSVIterator.java
new file mode 100644
index 0000000..3342f49
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/CSVIterator.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.parsers.csv;
+
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.commons.csv.CSVRecord;
+
+import java.util.Iterator;
+
+/**
+ * Iterates over the input CSV records and generates Row objects.
+ */
+class CSVIterator implements Iterator<Row> {
+
+  private Iterator<CSVRecord> iterator;
+
+  public CSVIterator(Iterator<CSVRecord> iterator) {
+    this.iterator = iterator;
+  }
+
+  @Override
+  public boolean hasNext() {
+    return iterator.hasNext();
+  }
+
+  @Override
+  public Row next() {
+    CSVRecord row = iterator.next();
+    Object[] values = new Object[row.size()];
+    for (int i = 0; i < values.length; i++) {
+      values[i] = row.get(i);
+    }
+    Row r = new Row(values);
+    return r;
+  }
+
+  @Override
+  public void remove() {
+    this.iterator.remove();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/CSVParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/CSVParser.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/CSVParser.java
new file mode 100644
index 0000000..56cff58
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/CSVParser.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.hive.resources.uploads.parsers.csv;
+
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive.resources.uploads.parsers.Parser;
+import org.apache.commons.csv.CSVFormat;
+
+import java.io.*;
+import java.util.*;
+
+/**
+ * Parses the given Reader, which contains a CSV stream, extracts the header and rows, and detects the datatypes of columns.
+ */
+public class CSVParser extends Parser {
+
+  private CSVIterator iterator;
+  private org.apache.commons.csv.CSVParser parser;
+
+  public CSVParser(Reader reader, ParseOptions parseOptions) throws IOException {
+    super(reader, parseOptions);
+    parser = new org.apache.commons.csv.CSVParser(this.reader, CSVFormat.EXCEL);
+    iterator = new CSVIterator(parser.iterator());
+  }
+
+  @Override
+  public Row extractHeader() {
+    return this.iterator().next();
+  }
+
+  @Override
+  public void close() throws IOException {
+    this.parser.close();
+  }
+
+  public Iterator<Row> iterator() {
+    return iterator; // only one iterator per parser.
+  }
+}

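A hedged usage sketch for the CSV parser (it assumes ParseOptions has a no-argument constructor; that class is not shown in this hunk, so verify against it):

import java.io.IOException;
import java.io.StringReader;
import java.util.Iterator;

import org.apache.ambari.view.hive.client.Row;
import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;
import org.apache.ambari.view.hive.resources.uploads.parsers.csv.CSVParser;

public class CsvPreviewExample {
  public static void main(String[] args) throws IOException {
    String csv = "id,name\n1,alice\n2,bob\n";
    CSVParser parser = new CSVParser(new StringReader(csv), new ParseOptions());
    try {
      // extractHeader() consumes the first record of the shared iterator,
      // so the loop below starts at the first data row.
      Row header = parser.extractHeader();
      Iterator<Row> rows = parser.iterator();
      while (rows.hasNext()) {
        Row row = rows.next(); // wraps ["1", "alice"], then ["2", "bob"]
      }
    } finally {
      parser.close();
    }
  }
}
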
http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/json/JSONIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/json/JSONIterator.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/json/JSONIterator.java
new file mode 100644
index 0000000..534d9e7
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/json/JSONIterator.java
@@ -0,0 +1,160 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.parsers.json;
+
+import com.google.gson.stream.JsonReader;
+import com.google.gson.stream.JsonToken;
+import org.apache.ambari.view.hive.resources.uploads.parsers.EndOfDocumentException;
+import org.apache.ambari.view.hive.resources.uploads.parsers.RowMapIterator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.LinkedHashMap;
+
+/**
+ * Iterates over the JsonReader and creates row data.
+ * Assumes an array of JSON objects.
+ * eg : [ { "col1Name" : "value-1-1", "col2Name" : "value-1-2"}, { "col1Name" : "value-2-1", "col2Name" : "value-2-2"}]
+ */
+class JSONIterator implements RowMapIterator {
+
+  protected final static Logger LOG =
+          LoggerFactory.getLogger(JSONIterator.class);
+
+  private LinkedHashMap<String, String> nextObject = null;
+
+  private LinkedHashMap<String, String> readNextObject(JsonReader reader) throws IOException, EndOfDocumentException {
+    LinkedHashMap<String, String> row = new LinkedHashMap<>();
+    boolean objectStarted = false;
+    boolean shouldBeName = false;
+    String currentName = null;
+
+    while (true) {
+      JsonToken token = reader.peek();
+      switch (token) {
+        case BEGIN_ARRAY:
+          throw new IllegalArgumentException("Row data cannot have an array.");
+        case END_ARRAY:
+          throw new EndOfDocumentException("End of Json Array document.");
+        case BEGIN_OBJECT:
+          if (objectStarted) {
+            throw new IllegalArgumentException("Nested objects not supported.");
+          }
+          if (shouldBeName) {
+            throw new IllegalArgumentException("name expected, got begin_object");
+          }
+          objectStarted = true;
+          shouldBeName = true;
+          reader.beginObject();
+          break;
+        case END_OBJECT:
+          if (!shouldBeName) {
+            throw new IllegalArgumentException("value expected, got end_object");
+          }
+          reader.endObject();
+          return row;
+        case NAME:
+          if (!shouldBeName) {
+            throw new IllegalArgumentException("name not expected at this point.");
+          }
+          shouldBeName = false;
+          currentName = reader.nextName();
+          break;
+        case NUMBER:
+        case STRING:
+          if (shouldBeName) {
+            throw new IllegalArgumentException("value not expected at this point.");
+          }
+          String n = reader.nextString();
+          row.put(currentName, n);
+          shouldBeName = true;
+          break;
+        case BOOLEAN:
+          if (shouldBeName) {
+            throw new IllegalArgumentException("value not expected at this point.");
+          }
+          String b = String.valueOf(reader.nextBoolean());
+          row.put(currentName, b);
+          shouldBeName = true;
+          break;
+        case NULL:
+          if (shouldBeName) {
+            throw new IllegalArgumentException("value not expected at this point.");
+          }
+          reader.nextNull();
+          row.put(currentName, "");
+          shouldBeName = true;
+          break;
+        case END_DOCUMENT:
+          return row;
+
+        default:
+          throw new IllegalArgumentException("Illegal token detected inside json: token : " + token.toString());
+      }
+    }
+  }
+
+  private JsonReader reader;
+
+  public JSONIterator(JsonReader reader) throws IOException {
+    this.reader = reader;
+    // test the start of array
+    JsonToken jt = reader.peek();
+    if (jt != JsonToken.BEGIN_ARRAY) {
+      throw new IllegalArgumentException("Expected the whole document to contain a single JsonArray.");
+    }
+
+    reader.beginArray(); // read the start of array
+    try {
+      nextObject = readNextObject(this.reader);
+    } catch (EndOfDocumentException e) {
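+      // an empty JSON array: nextObject stays null, so hasNext() returns false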
+    }
+  }
+
+  @Override
+  public boolean hasNext() {
+    return null != nextObject;
+  }
+
+  public LinkedHashMap<String, String> peek() {
+    return nextObject;
+  }
+
+  @Override
+  public LinkedHashMap<String, String> next() {
+    LinkedHashMap<String, String> currObject = nextObject;
+    try {
+      nextObject = readNextObject(this.reader);
+    } catch (EndOfDocumentException e) {
+      LOG.debug("End of Json document reached with next character ending the JSON Array.");
+      nextObject = null;
+    } catch (Exception e){
+      // for any other exception throw error right away
+      throw new IllegalArgumentException(e);
+    }
+    return currObject;
+  }
+
+  @Override
+  public void remove() {
+    // no operation.
+    LOG.info("No operation when remove called on JSONIterator.");
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/json/JSONParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/json/JSONParser.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/json/JSONParser.java
new file mode 100644
index 0000000..1d61798
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/json/JSONParser.java
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.parsers.json;
+
+import com.google.gson.stream.JsonReader;
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive.resources.uploads.parsers.Parser;
+import org.apache.ambari.view.hive.resources.uploads.parsers.RowIterator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.Reader;
+import java.util.Collection;
+import java.util.Iterator;
+
+
+/**
+ * Parses the input data from reader as JSON and provides iterator for rows.
+ *
+ * Expects the input reader to contain a JsonArray in which each element is a JsonObject
+ * corresponding to a row.
+ * eg. :
+ *
+ * [
+ *  {row1-col1, row1-col2, row1-col3},
+ *  {row2-col1, row2-col2, row2-col3}
+ * ]
+ *
+ */
+public class JSONParser extends Parser {
+
+  protected final static Logger LOG =
+          LoggerFactory.getLogger(JSONParser.class);
+
+  private RowIterator iterator;
+  private JsonReader jsonReader;
+  private JSONIterator jsonIterator;
+
+  public JSONParser(Reader reader, ParseOptions parseOptions) throws IOException {
+    super(reader, parseOptions);
+    this.jsonReader = new JsonReader(this.reader);
+    jsonIterator = new JSONIterator(this.jsonReader);
+    iterator = new RowIterator(jsonIterator);
+  }
+
+  @Override
+  public Row extractHeader() {
+    Collection<String> headers = this.iterator.extractHeaders();
+    Object[] objs = new Object[headers.size()];
+    Iterator<String> iterator = headers.iterator();
+    for(int i = 0 ; i < headers.size() ; i++){
+      objs[i] = iterator.next();
+    }
+
+    return new Row(objs);
+  }
+
+  @Override
+  public void close() throws IOException {
+    this.jsonReader.close();
+  }
+
+  @Override
+  public Iterator<Row> iterator() {
+    return iterator;
+  }
+}
\ No newline at end of file

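A parallel sketch for JSON (same ParseOptions assumption as the CSV example above). Unlike CSVParser, extractHeader() here derives the header from the keys of the peeked first object and does not consume a row:

import java.io.IOException;
import java.io.StringReader;
import java.util.Iterator;

import org.apache.ambari.view.hive.client.Row;
import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;
import org.apache.ambari.view.hive.resources.uploads.parsers.json.JSONParser;

public class JsonPreviewExample {
  public static void main(String[] args) throws IOException {
    String json = "[{\"id\":\"1\",\"name\":\"alice\"},{\"id\":\"2\",\"name\":\"bob\"}]";
    JSONParser parser = new JSONParser(new StringReader(json), new ParseOptions());
    try {
      Row header = parser.extractHeader(); // ["id", "name"], from the first object's keys
      Iterator<Row> rows = parser.iterator();
      while (rows.hasNext()) {
        Row row = rows.next(); // ["1", "alice"], then ["2", "bob"]
      }
    } finally {
      parser.close();
    }
  }
}
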
http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/xml/XMLIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/xml/XMLIterator.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/xml/XMLIterator.java
new file mode 100644
index 0000000..5852dfc
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/xml/XMLIterator.java
@@ -0,0 +1,195 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.parsers.xml;
+
+import org.apache.ambari.view.hive.resources.uploads.parsers.EndOfDocumentException;
+import org.apache.ambari.view.hive.resources.uploads.parsers.RowMapIterator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.xml.namespace.QName;
+import javax.xml.stream.XMLEventReader;
+import javax.xml.stream.XMLStreamConstants;
+import javax.xml.stream.XMLStreamException;
+import javax.xml.stream.events.*;
+import java.io.IOException;
+import java.util.LinkedHashMap;
+
+/**
+ * Assumes XML of the following format:
+ * <table>
+ * <row>
+ * <col name="col1Name">row1-col1-Data</col>
+ * <col name="col2Name">row1-col2-Data</col>
+ * <col name="col3Name">row1-col3-Data</col>
+ * <col name="col4Name">row1-col4-Data</col>
+ * </row>
+ * <row>
+ * <col name="col1Name">row2-col1-Data</col>
+ * <col name="col2Name">row2-col2-Data</col>
+ * <col name="col3Name">row2-col3-Data</col>
+ * <col name="col4Name">row2-col4-Data</col>
+ * </row>
+ * </table>
+ */
+class XMLIterator implements RowMapIterator {
+
+  protected final static Logger LOG =
+          LoggerFactory.getLogger(XMLIterator.class);
+
+  private LinkedHashMap<String, String> nextObject = null;
+  private static final String TAG_TABLE = "table";
+  private static final String TAG_ROW = "row";
+  private static final String TAG_COL = "col";
+  private boolean documentStarted = false;
+  private XMLEventReader reader;
+
+  public XMLIterator(XMLEventReader reader) throws IOException {
+    this.reader = reader;
+    try {
+      nextObject = readNextObject(this.reader);
+    } catch (EndOfDocumentException e) {
+      LOG.debug("error : {}", e);
+    } catch (XMLStreamException e) {
+      throw new IOException(e);
+    }
+  }
+
+  @Override
+  public boolean hasNext() {
+    return null != nextObject;
+  }
+
+  public LinkedHashMap<String, String> peek() {
+    return nextObject;
+  }
+
+  @Override
+  public LinkedHashMap<String, String> next() {
+    LinkedHashMap<String, String> currObject = nextObject;
+    try {
+      nextObject = readNextObject(this.reader);
+    } catch (IOException e) {
+      LOG.error("Exception occured while reading the next row from XML : {} ", e);
+      nextObject = null;
+    } catch (EndOfDocumentException e) {
+      LOG.debug("End of XML document reached with next character ending the XML.");
+      nextObject = null;
+    } catch (XMLStreamException e) {
+      LOG.error("Exception occured while reading the next row from XML : {} ", e);
+      nextObject = null;
+    }
+    return currObject;
+  }
+
+  @Override
+  public void remove() {
+    // no operation.
+    LOG.info("No operation when remove called.");
+  }
+
+  private LinkedHashMap<String, String> readNextObject(XMLEventReader reader) throws IOException, EndOfDocumentException, XMLStreamException {
+    LinkedHashMap<String, String> row = new LinkedHashMap<>();
+    boolean objectStarted = false;
+    String currentName = null;
+
+    while (true) {
+      XMLEvent event = reader.nextEvent();
+      switch (event.getEventType()) {
+        case XMLStreamConstants.START_ELEMENT:
+          StartElement startElement = event.asStartElement();
+          String qName = startElement.getName().getLocalPart();
+          LOG.debug("startName : {}" , qName);
+          switch (qName) {
+            case TAG_TABLE:
+              if (documentStarted) {
+                throw new IllegalArgumentException("Cannot have a <table> tag nested inside another <table> tag");
+              } else {
+                documentStarted = true;
+              }
+              break;
+            case TAG_ROW:
+              if (objectStarted) {
+                throw new IllegalArgumentException("Cannot have a <row> tag nested inside another <row> tag");
+              } else {
+                objectStarted = true;
+              }
+              break;
+            case TAG_COL:
+              if (!objectStarted) {
+                throw new IllegalArgumentException("Stray tag " + qName);
+              }
+              Attribute nameAttr = startElement.getAttributeByName( new QName("name"));
+              if( null == nameAttr ){
+                throw new IllegalArgumentException("Missing name attribute in col tag.");
+              }
+              currentName = nameAttr.getValue();
+              break;
+            default:
+              throw new IllegalArgumentException("Illegal start tag " + qName + " encountered.");
+          }
+          break;
+        case XMLStreamConstants.END_ELEMENT:
+          EndElement endElement = event.asEndElement();
+          String name = endElement.getName().getLocalPart();
+          LOG.debug("endName : {}", name);
+          switch (name) {
+            case TAG_TABLE:
+              if (!documentStarted) {
+                throw new IllegalArgumentException("Stray </table> tag.");
+              }
+              throw new EndOfDocumentException("End of XML document.");
+
+            case TAG_ROW:
+              if (!objectStarted) {
+                throw new IllegalArgumentException("Stray </row> tag.");
+              }
+              return row;
+
+            case TAG_COL:
+              if (!objectStarted) {
+                throw new IllegalArgumentException("Stray tag " + name);
+              }
+              currentName = null;
+              break;
+
+            default:
+              throw new IllegalArgumentException("Illegal start ending " + name + " encountered.");
+          }
+          break;
+        case XMLStreamConstants.CHARACTERS:
+          Characters characters = event.asCharacters();
+          if (characters.isWhiteSpace() && currentName == null)
+            break;
+          String data = characters.getData();
+          LOG.debug("character data : {}", data);
+          if (currentName == null) {
+            throw new IllegalArgumentException("Illegal characters outside any tag : " + data);
+          } else {
+            String oldData = row.get(currentName);
+            if (null != oldData) {
+              data = oldData + data;
+            }
+            row.put(currentName, data);
+          }
+          break;
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/xml/XMLParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/xml/XMLParser.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/xml/XMLParser.java
new file mode 100644
index 0000000..74bf272
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/xml/XMLParser.java
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.parsers.xml;
+
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive.resources.uploads.parsers.Parser;
+import org.apache.ambari.view.hive.resources.uploads.parsers.RowIterator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.xml.stream.XMLEventReader;
+import javax.xml.stream.XMLInputFactory;
+import javax.xml.stream.XMLStreamException;
+import java.io.IOException;
+import java.io.Reader;
+import java.util.Collection;
+import java.util.Iterator;
+
+/**
+ * Assumes XML of the following format:
+ * <table>
+ * <row>
+ * <col name="col1Name">row1-col1-Data</col>
+ * <col name="col2Name">row1-col2-Data</col>
+ * <col name="col3Name">row1-col3-Data</col>
+ * <col name="col4Name">row1-col4-Data</col>
+ * </row>
+ * <row>
+ * <col name="col1Name">row2-col1-Data</col>
+ * <col name="col2Name">row2-col2-Data</col>
+ * <col name="col3Name">row2-col3-Data</col>
+ * <col name="col4Name">row2-col4-Data</col>
+ * </row>
+ * </table>
+ */
+public class XMLParser extends Parser {
+
+  protected final static Logger LOG =
+          LoggerFactory.getLogger(XMLParser.class);
+
+  private RowIterator iterator;
+  private XMLEventReader xmlReader;
+  private XMLIterator xmlIterator;
+
+  public XMLParser(Reader reader, ParseOptions parseOptions) throws IOException {
+    super(reader, parseOptions);
+    XMLInputFactory factory = XMLInputFactory.newInstance();
+    try {
+      this.xmlReader = factory.createXMLEventReader(reader);
+    } catch (XMLStreamException e) {
+      LOG.error("error occurred while creating xml reader : ", e);
+      throw new IOException("error occurred while creating xml reader : ", e);
+    }
+    xmlIterator = new XMLIterator(this.xmlReader);
+    iterator = new RowIterator(xmlIterator);
+  }
+
+  @Override
+  public Row extractHeader() {
+    Collection<String> headers = this.iterator.extractHeaders();
+    Object[] objs = new Object[headers.size()];
+    Iterator<String> iterator = headers.iterator();
+    for (int i = 0; i < headers.size(); i++) {
+      objs[i] = iterator.next();
+    }
+
+    return new Row(objs);
+  }
+
+  @Override
+  public void close() throws IOException {
+    try {
+      this.xmlReader.close();
+    } catch (XMLStreamException e) {
+      throw new IOException(e);
+    }
+  }
+
+  @Override
+  public Iterator<Row> iterator() {
+    return iterator;
+  }
+}

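And the same shape of sketch for XML, using the <table>/<row>/<col> layout documented above (ParseOptions assumption as before):

import java.io.IOException;
import java.io.StringReader;
import java.util.Iterator;

import org.apache.ambari.view.hive.client.Row;
import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;
import org.apache.ambari.view.hive.resources.uploads.parsers.xml.XMLParser;

public class XmlPreviewExample {
  public static void main(String[] args) throws IOException {
    String xml = "<table>"
        + "<row><col name=\"id\">1</col><col name=\"name\">alice</col></row>"
        + "<row><col name=\"id\">2</col><col name=\"name\">bob</col></row>"
        + "</table>";
    XMLParser parser = new XMLParser(new StringReader(xml), new ParseOptions());
    try {
      Row header = parser.extractHeader(); // ["id", "name"], from the first row's col names
      Iterator<Row> rows = parser.iterator();
      while (rows.hasNext()) {
        Row row = rows.next(); // ["1", "alice"], then ["2", "bob"]
      }
    } finally {
      parser.close();
    }
  }
}
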
http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/DeleteQueryInput.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/DeleteQueryInput.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/DeleteQueryInput.java
new file mode 100644
index 0000000..1fe30fd
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/DeleteQueryInput.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.query;
+
+public class DeleteQueryInput {
+  private String database;
+  private String table;
+
+  public DeleteQueryInput() {
+  }
+
+  public DeleteQueryInput(String database, String table) {
+    this.database = database;
+    this.table = table;
+  }
+
+  public String getDatabase() {
+    return database;
+  }
+
+  public void setDatabase(String database) {
+    this.database = database;
+  }
+
+  public String getTable() {
+    return table;
+  }
+
+  public void setTable(String table) {
+    this.table = table;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/InsertFromQueryInput.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/InsertFromQueryInput.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/InsertFromQueryInput.java
new file mode 100644
index 0000000..5befc51
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/InsertFromQueryInput.java
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.query;
+
+public class InsertFromQueryInput {
+  private String fromDatabase;
+  private String fromTable;
+  private String toDatabase;
+  private String toTable;
+
+  public InsertFromQueryInput() {
+  }
+
+  public InsertFromQueryInput(String fromDatabase, String fromTable, String toDatabase, String toTable) {
+    this.fromDatabase = fromDatabase;
+    this.fromTable = fromTable;
+    this.toDatabase = toDatabase;
+    this.toTable = toTable;
+  }
+
+  public String getFromDatabase() {
+    return fromDatabase;
+  }
+
+  public void setFromDatabase(String fromDatabase) {
+    this.fromDatabase = fromDatabase;
+  }
+
+  public String getFromTable() {
+    return fromTable;
+  }
+
+  public void setFromTable(String fromTable) {
+    this.fromTable = fromTable;
+  }
+
+  public String getToDatabase() {
+    return toDatabase;
+  }
+
+  public void setToDatabase(String toDatabase) {
+    this.toDatabase = toDatabase;
+  }
+
+  public String getToTable() {
+    return toTable;
+  }
+
+  public void setToTable(String toTable) {
+    this.toTable = toTable;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/LoadQueryInput.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/LoadQueryInput.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/LoadQueryInput.java
new file mode 100644
index 0000000..122b754
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/LoadQueryInput.java
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.query;
+
+/**
+ * Input for QueryGenerator when generating a LOAD DATA query.
+ */
+public class LoadQueryInput {
+  private String hdfsFilePath;
+  private String databaseName;
+  private String tableName;
+
+  public LoadQueryInput(String hdfsFilePath, String databaseName, String tableName) {
+    this.hdfsFilePath = hdfsFilePath;
+    this.databaseName = databaseName;
+    this.tableName = tableName;
+  }
+
+  public String getHdfsFilePath() {
+    return hdfsFilePath;
+  }
+
+  public void setHdfsFilePath(String hdfsFilePath) {
+    this.hdfsFilePath = hdfsFilePath;
+  }
+
+  public String getDatabaseName() {
+    return databaseName;
+  }
+
+  public void setDatabaseName(String databaseName) {
+    this.databaseName = databaseName;
+  }
+
+  public String getTableName() {
+    return tableName;
+  }
+
+  public void setTableName(String tableName) {
+    this.tableName = tableName;
+  }
+
+  @Override
+  public String toString() {
+    return "LoadQueryInput{" +
+            "hdfsFilePath='" + hdfsFilePath + '\'' +
+            ", databaseName='" + databaseName + '\'' +
+            ", tableName='" + tableName + '\'' +
+            '}';
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/QueryGenerator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/QueryGenerator.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/QueryGenerator.java
new file mode 100644
index 0000000..6bab229
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/QueryGenerator.java
@@ -0,0 +1,98 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.query;
+
+import org.apache.ambari.view.hive.client.ColumnDescription;
+import org.apache.ambari.view.hive.resources.uploads.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+
+/**
+ * Generates the SQL queries (create, insert, drop, load) from the given inputs.
+ */
+public class QueryGenerator {
+  protected final static Logger LOG =
+          LoggerFactory.getLogger(QueryGenerator.class);
+
+  public String generateCreateQuery(TableInfo tableInfo) {
+    String tableName = tableInfo.getTableName();
+    List<ColumnDescriptionImpl> cdList = tableInfo.getColumns();
+
+    StringBuilder query = new StringBuilder();
+    query.append("create table " + tableName + " (");
+    Collections.sort(cdList, new Comparator<ColumnDescription>() {
+      @Override
+      public int compare(ColumnDescription o1, ColumnDescription o2) {
+        return o1.getPosition() - o2.getPosition();
+      }
+    });
+
+    boolean first = true;
+    for (ColumnDescriptionImpl cd : cdList) {
+      if (first) {
+        first = false;
+      } else {
+        query.append(", ");
+      }
+
+      query.append(cd.getName() + " " + cd.getType());
+      if (cd.getPrecision() != null) {
+        query.append("(").append(cd.getPrecision());
+        if (cd.getScale() != null) {
+          query.append(",").append(cd.getScale());
+        }
+        query.append(")");
+      }
+
+    }
+
+    query.append(")");
+
+    if (tableInfo.getHiveFileType() == HiveFileType.TEXTFILE)
+      query.append(" ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE;");
+    else
+      query.append(" STORED AS " + tableInfo.getHiveFileType() + ";");
+
+    String queryString = query.toString();
+    LOG.info("Query : {}", queryString);
+    return queryString;
+  }
+
+  public String generateInsertFromQuery(InsertFromQueryInput ifqi) {
+    String insertQuery = "insert into table " + ifqi.getToDatabase() + "." + ifqi.getToTable() + " select * from " + ifqi.getFromDatabase() + "." + ifqi.getFromTable();
+    LOG.info("Insert Query : {}", insertQuery);
+    return insertQuery;
+  }
+
+  public String generateDropTableQuery(DeleteQueryInput deleteQueryInput) {
+    String dropQuery = "drop table " + deleteQueryInput.getDatabase() + "." + deleteQueryInput.getTable();
+    LOG.info("Drop Query : {}", dropQuery);
+    return dropQuery;
+  }
+
+  public String generateLoadQuery(LoadQueryInput loadQueryInput) {
+    String loadFromQuery = "LOAD DATA INPATH '" + loadQueryInput.getHdfsFilePath() + "' INTO TABLE " + loadQueryInput.getDatabaseName() + "." + loadQueryInput.getTableName() + ";";
+    LOG.info("Load From Query : {}", loadFromQuery);
+    return loadFromQuery;
+  }
+}

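A sketch of the generator in use. ColumnDescriptionImpl's constructor is not shown in this hunk, so the column list is taken as a parameter rather than invented; HiveFileType.TEXTFILE is the one constant this hunk confirms:

import java.util.List;

import org.apache.ambari.view.hive.resources.uploads.ColumnDescriptionImpl;
import org.apache.ambari.view.hive.resources.uploads.HiveFileType;
import org.apache.ambari.view.hive.resources.uploads.query.*;

public class QueryGeneratorExample {
  public static void printQueries(List<ColumnDescriptionImpl> columns) {
    QueryGenerator generator = new QueryGenerator();

    // create table employees (...) ROW FORMAT DELIMITED ... STORED AS TEXTFILE;
    // (note that generateCreateQuery uses only the table name, not the database name)
    TableInfo table = new TableInfo("default", "employees", columns, HiveFileType.TEXTFILE);
    System.out.println(generator.generateCreateQuery(table));

    // LOAD DATA INPATH '/tmp/employees.csv' INTO TABLE default.employees_tmp;
    System.out.println(generator.generateLoadQuery(
        new LoadQueryInput("/tmp/employees.csv", "default", "employees_tmp")));

    // insert into table default.employees select * from default.employees_tmp
    System.out.println(generator.generateInsertFromQuery(
        new InsertFromQueryInput("default", "employees_tmp", "default", "employees")));

    // drop table default.employees_tmp
    System.out.println(generator.generateDropTableQuery(
        new DeleteQueryInput("default", "employees_tmp")));
  }
}
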
http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/TableInfo.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/TableInfo.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/TableInfo.java
new file mode 100644
index 0000000..903e5b0
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/TableInfo.java
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.query;
+
+import org.apache.ambari.view.hive.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive.resources.uploads.HiveFileType;
+
+import java.util.List;
+
+/**
+ * Used as input in query generation.
+ */
+public class TableInfo {
+  private String tableName;
+  private String databaseName;
+  private List<ColumnDescriptionImpl> columns;
+  private HiveFileType hiveFileType;
+
+  public String getTableName() {
+    return tableName;
+  }
+
+  public void setTableName(String tableName) {
+    this.tableName = tableName;
+  }
+
+  public String getDatabaseName() {
+    return databaseName;
+  }
+
+  public void setDatabaseName(String databaseName) {
+    this.databaseName = databaseName;
+  }
+
+  public List<ColumnDescriptionImpl> getColumns() {
+    return columns;
+  }
+
+  public void setColumns(List<ColumnDescriptionImpl> columns) {
+    this.columns = columns;
+  }
+
+  public HiveFileType getHiveFileType() {
+    return hiveFileType;
+  }
+
+  public void setHiveFileType(HiveFileType hiveFileType) {
+    this.hiveFileType = hiveFileType;
+  }
+
+  public TableInfo(String databaseName, String tableName, List<ColumnDescriptionImpl> columns, HiveFileType hiveFileType) {
+    this.tableName = tableName;
+    this.databaseName = databaseName;
+    this.columns = columns;
+    this.hiveFileType = hiveFileType;
+  }
+
+  public TableInfo(TableInfo tableInfo) {
+    this.tableName = tableInfo.tableName;
+    this.databaseName = tableInfo.databaseName;
+    this.columns = tableInfo.columns;
+    this.hiveFileType = tableInfo.hiveFileType;
+  }
+
+  public TableInfo() {
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/resources/ui/hive-web/app/adapters/upload-table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/adapters/upload-table.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/adapters/upload-table.js
index 6a9c54b..ef4df43 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/adapters/upload-table.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/adapters/upload-table.js
@@ -22,12 +22,6 @@ import application from './application';
 import FileUploader from './file-upload';
 
 export default application.extend({
-  hdrs : function(){
-    console.log("inside hdrs : headers : ",this.get('headers'));
-    var h = Ember.$.extend(true, {},this.get('headers'));
-    delete h['Content-Type'];
-    return h;
-  }.property('headers'),
 
   buildUploadURL: function (path) {
     return this.buildURL() + "/resources/upload/" + path;
@@ -40,9 +34,10 @@ export default application.extend({
     console.log("uploader : extras : ", extras);
     console.log("uploader : files : ", files);
 
-    console.log("hdrs : ", this.get('hdrs'));
+    var hdrs = Ember.$.extend(true, {},this.get('headers'));
+    delete hdrs['Content-Type'];
     var uploader = FileUploader.create({
-      headers: this.get('hdrs'),
+      headers: hdrs,
       url: uploadUrl
     });
 
@@ -53,24 +48,46 @@ export default application.extend({
   },
 
   createTable: function (tableData) {
-    var _this = this;
-    var postHeader = JSON.parse(JSON.stringify(this.get('headers')));
-    console.log("headers postHeadesfsfdfsfsfss : : " , postHeader);
-    return Ember.$.ajax(      {
-        url :  this.buildUploadURL("createTable"),
-        type : 'post',
-        data: JSON.stringify(tableData),
-        headers: this.get('headers'),
-        dataType : 'json'
-      }
-    );
+    console.log("creating table with data :", tableData);
+    return this.doPost("createTable",tableData);
   },
 
-  getCreateTableResult : function(jobId){
-    return Ember.$.ajax(this.buildUploadURL("createTable/status"),{
-      data : {"jobId":jobId},
-      type: "get",
-      headers: this.get('headers')
-    });
+  insertIntoTable: function(insertData){
+    console.log("inserting into table with data : ", insertData);
+    return this.doPost("insertIntoTable",insertData);
+  },
+
+  deleteTable: function(deleteData){
+    console.log("delete table with info : ", deleteData);
+    return this.doPost("deleteTable",deleteData);
+  },
+
+  doPost : function(path,inputData){
+    var self = this;
+    return new Ember.RSVP.Promise(function(resolve,reject){
+                 Ember.$.ajax({
+                     url :  self.buildUploadURL(path),
+                     type : 'post',
+                     data: JSON.stringify(inputData),
+                     headers: self.get('headers'),
+                     dataType : 'json'
+                 }).done(function(data) {
+                     console.log( "inside done : data : ", data );
+                     resolve(data);
+                 }).fail(function(error) {
+                     console.log( "inside fail error :  ", error );
+                     reject(error);
+                 });
+              });
+  },
+
+  previewFromHDFS : function(previewFromHdfsData){
+    console.log("preview from hdfs with info : ", previewFromHdfsData);
+    return this.doPost("previewFromHdfs",previewFromHdfsData)
+  },
+
+  uploadFromHDFS : function(uploadFromHdfsData){
+    console.log("upload from hdfs with info : ", uploadFromHdfsData);
+    return this.doPost("uploadFromHDFS",uploadFromHdfsData)
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/resources/ui/hive-web/app/components/input-header.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/components/input-header.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/components/input-header.js
new file mode 100644
index 0000000..7ff5bf7
--- /dev/null
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/components/input-header.js
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+  tagName: '',
+  dataTypes: null,
+  column: null,
+  precisionChanged: Ember.observer('column.precision', function () {
+    var col = this.get('column');
+    if( typeof col.precision !== 'number') {
+        Ember.set(col, 'precision', Number(col.precision));
+      }
+  }),
+
+  scaleChanged: Ember.observer('column.scale', function () {
+    var col = this.get('column');
+    if( typeof col.scale !== 'number'){
+      Ember.set(col,'scale',Number(col.scale));
+    }
+  }),
+
+  typeChanged: Ember.observer('column.type', function () {
+    var col = this.get('column');
+
+    var type = col.type;
+    if( type != "DECIMAL" ){
+      Ember.set(col,'scale');
+    }
+
+    if(type != "VARCHAR" && type != "CHAR" && type != "DECIMAL" ){
+      Ember.set(col,'precision');
+    }
+  }),
+
+  noPrecision: Ember.computed('column.type', function () {
+    var type = this.get('column').type;
+    return !(type == "VARCHAR" || type == "CHAR" || type == "DECIMAL");
+  }),
+
+  noScale: Ember.computed('column.type', function () {
+    return this.get('column').type != "DECIMAL";
+  })
+
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/resources/ui/hive-web/app/components/radio-button.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/components/radio-button.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/components/radio-button.js
new file mode 100644
index 0000000..a07caaf
--- /dev/null
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/components/radio-button.js
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Component.extend({
+  tagName: 'input',
+  type: 'radio',
+  attributeBindings: ['type', 'htmlChecked:checked', 'value', 'name', 'disabled'],
+
+  htmlChecked: function() {
+    return this.get('value') === this.get('checked');
+  }.property('value', 'checked'),
+
+  change: function() {
+    this.set('checked', this.get('value'));
+  },
+
+  _updateElementValue: function() {
+    Ember.run.next(this, function() {
+      this.$().prop('checked', this.get('htmlChecked'));
+    });
+  }.observes('htmlChecked')
+});
\ No newline at end of file
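
The component keeps a group of native radio inputs in sync with a single
bound "checked" property: change() writes the clicked input's value into the
property, htmlChecked reports whether this input's value matches it, and
_updateElementValue re-applies the checked attribute on the next run loop,
because the browser unchecks sibling radios without notifying Ember. The
round trip, as a stripped-down plain JavaScript sketch (makeRadio is
illustrative only):

    // Two radios sharing one "checked" slot.
    var shared = { checked: "local" };

    function makeRadio(value) {
      return {
        value: value,
        htmlChecked: function () { return this.value === shared.checked; },
        change: function () { shared.checked = this.value; }
      };
    }

    var local = makeRadio("local");
    var hdfs  = makeRadio("hdfs");
    hdfs.change(); // the user clicks the HDFS radio
    console.log(local.htmlChecked(), hdfs.htmlChecked()); // false true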

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/upload-table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/upload-table.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/upload-table.js
index ab7b934..5085cc5 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/upload-table.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/upload-table.js
@@ -22,35 +22,67 @@ import constants from 'hive/utils/constants';
 
 
 export default Ember.Controller.extend({
+  isLocalUpload : Ember.computed.equal("uploadSource","local"),
+  uploadSource : "local",
+  hdfsPath : "",
+  jobService: Ember.inject.service(constants.namingConventions.job),
   notifyService: Ember.inject.service(constants.namingConventions.notify),
-  needs : ['databases'],
-  showErrors : false,
+  needs: ['databases'],
+  showErrors: false,
   uploader: Uploader.create(),
   baseUrl: "/resources/upload",
-  isFirstRowHeader: null, // is first row  header
+  isFirstRowHeader: true, // is first row header
   header: null,  // header received from server
   files: null, // files that need to be uploaded only file[0] is relevant
   firstRow: [], // the actual first row of the table.
   rows: null,  // preview rows received from server
-  databaseName:null,
-  selectedDatabase : null,
-  filePath : null,
+  databaseName: null,
+  selectedDatabase: null,
+  filePath: null,
   tableName: null,
-  dataTypes : [
-     "TINYINT", //
-     "SMALLINT", //
-     "INT", //
-     "BIGINT", //
-     "BOOLEAN", //
-     "FLOAT", //
-     "DOUBLE", //
-     "STRING", //
-     "BINARY", // -- (Note: Available in Hive 0.8.0 and later)
-     "TIMESTAMP", // -- (Note: Available in Hive 0.8.0 and later)
-     "DECIMAL", // -- (Note: Available in Hive 0.11.0 and later)
-     "DATE", // -- (Note: Available in Hive 0.12.0 and later)
-     "VARCHAR", // -- (Note: Available in Hive 0.12.0 and later)
-     "CHAR" // -- (Note: Available in Hive 0.13.0 and later)
+  uploadProgressInfos : [],
+  onChangeUploadSource : function(){
+    this.clearFields();
+  }.observes("uploadSource"),
+  uploadProgressInfo : Ember.computed("uploadProgressInfos.[]", function () {
+    var infos = this.get('uploadProgressInfos');
+    var info = "";
+    for (var i = 0; i < infos.length; i++) {
+      info += infos.objectAt(i);
+    }
+    return new Ember.Handlebars.SafeString(info);
+  }),
+  inputFileTypes :[
+    {id : "CSV", name : "CSV"},
+    {id : "JSON", name : "JSON"},
+    {id : "XML", name : "XML"}
+  ],
+  inputFileType : {id : "CSV", name : "CSV"},
+  inputFileTypeCSV : Ember.computed.equal('inputFileType.id',"CSV"),
+  fileTypes: [
+    "SEQUENCEFILE",
+    "TEXTFILE",
+    "RCFILE",
+    "ORC",
+    "PARQUET",
+    "AVRO",
+    "INPUTFORMAT"
+  ],
+  selectedFileType: "ORC",
+  dataTypes: [
+    "TINYINT", //
+    "SMALLINT", //
+    "INT", //
+    "BIGINT", //
+    "BOOLEAN", //
+    "FLOAT", //
+    "DOUBLE", //
+    "STRING", //
+    "BINARY", // -- (Note: Available in Hive 0.8.0 and later)
+    "TIMESTAMP", // -- (Note: Available in Hive 0.8.0 and later)
+    "DECIMAL", // -- (Note: Available in Hive 0.11.0 and later)
+    "DATE", // -- (Note: Available in Hive 0.12.0 and later)
+    "VARCHAR", // -- (Note: Available in Hive 0.12.0 and later)
+    "CHAR" // -- (Note: Available in Hive 0.13.0 and later)
   ],
   isFirstRowHeaderDidChange: function () {
     console.log("inside onFirstRowHeader : isFirstRowHeader : " + this.get('isFirstRowHeader'));
@@ -59,7 +91,7 @@ export default Ember.Controller.extend({
         if (this.get('rows')) {
           this.get('rows').unshiftObject({row: this.get('firstRow')});
         }
-      } else {
+      } else if( this.get('header') ) { // headers are available
         // take first row of
         this.get('header').forEach(function (item, index) {
           console.log("item : ", item);
@@ -74,23 +106,43 @@ export default Ember.Controller.extend({
     }
   }.observes('isFirstRowHeader'),
 
-  uploadForPreview: function (files) {
-    console.log("uploaderForPreview called.");
-    return this.get('uploader').uploadFiles('preview', files);
+  popUploadProgressInfos : function(){
+    this.get('uploadProgressInfos').popObject();
+  },
+
+  pushUploadProgressInfos : function(info){
+    this.get('uploadProgressInfos').pushObject(info);
+  },
+
+  clearUploadProgressModal : function(){
+    // pop until empty: a for-loop bounded by the live length would stop
+    // halfway, since the array shrinks on every pop
+    while (this.get('uploadProgressInfos').length > 0) {
+      this.popUploadProgressInfos();
+    }
+  },
+
+  hideUploadModal : function(){
+    console.log("hiding the modal ....");
+    this.clearUploadProgressModal();
+    Ember.$("#uploadProgressModal").modal("hide");
+  },
+
+  showUploadModal : function(){
+    Ember.$("#uploadProgressModal").modal("show");
   },
 
   clearFields: function () {
+    this.set("hdfsPath");
     this.set("header");
     this.set("rows");
     this.set("error");
-    this.set('isFirstRowHeader');
+    this.set('isFirstRowHeader',true);
     this.set('files');
     this.set("firstRow");
     this.set("selectedDatabase");
     this.set("databaseName");
     this.set("filePath");
     this.set('tableName');
-
+    this.clearUploadProgressModal();
     this.printValues();
   },
 
@@ -103,65 +155,397 @@ export default Ember.Controller.extend({
     console.log("files : ", this.get('files'));
     console.log("firstRow : ", this.get('firstRow'));
   },
+
+  generateTempTableName : function(){
+    // 30 random lowercase letters, used as the temporary table name
+    var text = "";
+    var possible = "abcdefghijklmnopqrstuvwxyz";
+
+    for (var i = 0; i < 30; i++) {
+      text += possible.charAt(Math.floor(Math.random() * possible.length));
+    }
+
+    return text;
+  },
+
+  waitForJobStatus: function (jobId, resolve, reject) {
+    console.log("finding status of job: ", jobId);
+    var self = this;
+    var fetchJobPromise = this.get('jobService').fetchJobStatus(jobId);
+    fetchJobPromise.then(function (data) {
+      console.log("waitForJobStatus : data : ", data);
+      var status = data.jobStatus;
+      if (status == "Succeeded") {
+        console.log("resolving waitForJobStatus with : ", status);
+        resolve(status);
+      } else if (status == "Canceled" || status == "Closed" || status == "Error") {
+        console.log("rejecting waitForJobStatus with : " + status);
+        reject(new Error(status));
+      } else {
+        console.log("retrying waitForJobStatus : ");
+        self.waitForJobStatus(jobId, resolve, reject);
+      }
+    }, function (error) {
+      console.log("rejecting waitForJobStatus with : " + error);
+      reject(error);
+    });
+  },
+
+  uploadForPreview: function (files) {
+    console.log("uploaderForPreview called.");
+    var self = this;
+    return this.get('uploader').uploadFiles('preview', files, {"isFirstRowHeader" : self.get("isFirstRowHeader"), "inputFileType" : self.get("inputFileType").id});
+  },
+
+  uploadForPreviewFromHDFS : function(){
+    console.log("uploadForPreviewFromHDFS called.");
+    return this.get('uploader').previewFromHDFS({"isFirstRowHeader" : this.get("isFirstRowHeader"),"inputFileType" : this.get("inputFileType").id , "hdfsPath" : this.get("hdfsPath") });
+  },
+
+  generatePreview : function(files){
+    var self = this;
+    var promise = null;
+    if(this.get('isLocalUpload')){
+      promise = this.uploadForPreview(files);
+    }else{
+      promise = this.uploadForPreviewFromHDFS();
+    }
+
+    return promise.then(function (data) {
+        self.onGeneratePreviewSuccess(data);
+    }, function (error) {
+        self.onGeneratePreviewFailure(error);
+    });
+  },
+
+  waitForGeneratingPreview: function () {
+    console.log("waitForGeneratingPreview");
+    this.showUploadModal();
+    this.pushUploadProgressInfos("<li> Generating Preview .... </li>");
+  },
+
   previewTable: function (data) {
     console.log('inside previewTable');
     this.set("header", data.header);
-    this.set("rows", data.rows);
     this.set("firstRow", data.rows[0].row);
     console.log("firstRow : ", this.get('firstRow'));
     this.set('isFirstRowHeader', data.isFirstRowHeader);
+    this.set('tableName',data.tableName);
+    if (data.isFirstRowHeader == true) {
+      // the first row is the header; drop it from the preview rows
+      data.rows = data.rows.slice(1);
+    }
+    this.set("rows", data.rows);
+  },
+
+  onGeneratePreviewSuccess: function (data) {
+    console.log("onGeneratePreviewSuccess");
+    this.hideUploadModal();
+    this.previewTable(data);
+  },
+
+  onGeneratePreviewFailure: function (error) {
+    console.log("onGeneratePreviewFailure");
+    this.hideUploadModal();
+    this.setError(error);
+  },
+
+  createActualTable : function(){
+    console.log("createActualTable");
+    return this.createTable();
   },
 
-  fetchCreateTableStatus: function (jobId, resolve, reject) {
+  waitForCreateActualTable: function (jobId) {
+    console.log("waitForCreateActualTable");
+    this.pushUploadProgressInfos("<li>Creating actual table.... </li>");
     var self = this;
-    this.get('uploader').getCreateTableResult(jobId).then(function (data) {
-      console.log("fetchCreateTableStatus : data : ", data);
-      var status = data.status;
-      if (status == "Succeeded") {
-        console.log("resolving fetchCreateTableStatus with : " + data);
-        resolve(status);
-      } else if (status == "Canceled" || status == "Closed" || status == "Error") {
-        console.log("rejecting fetchCreateTableStatus with : " + status);
-        reject(new Error(status));
-      } else {
-        console.log("retrying fetchCreateTableStatus : ");
-        self.fetchCreateTableStatus(jobId, resolve, reject);
-      }
-    }, function (error) {
-      console.log("rejecting fetchCreateTableStatus with : " + error);
-      reject(error);
-    })
+    var p = new Ember.RSVP.Promise(function (resolve, reject) {
+      self.waitForJobStatus(jobId, resolve, reject);
+    });
+
+    return p;
+  },
+
+  onCreateActualTableSuccess : function(){
+    console.log("onCreateTableSuccess");
+    this.popUploadProgressInfos();
+    this.pushUploadProgressInfos("<li> Successfully created actual table. </li>");
+  },
+
+  onCreateActualTableFailure : function(error){
+    console.log("onCreateActualTableFailure");
+    this.setError(error);
   },
 
-  waitForResult: function (jobId) {
+  createTempTable : function(){
+    console.log("createTempTable");
+    var tempTableName = this.generateTempTableName();
+    this.set('tempTableName',tempTableName);
+    return this.get('uploader').createTable({
+      "isFirstRowHeader": this.get("isFirstRowHeader"),
+      "header": this.get("header"),
+      "tableName": tempTableName,
+      "databaseName": this.get('databaseName'),
+      "fileType":"TEXTFILE"
+    });
+  },
+
+  waitForCreateTempTable: function (jobId) {
+    console.log("waitForCreateTempTable");
+    this.pushUploadProgressInfos("<li>Creating temporary table.... </li>");
     var self = this;
-    return new Ember.RSVP.Promise(function (resolve, reject) {
-      self.fetchCreateTableStatus(jobId,resolve,reject);
+    var p = new Ember.RSVP.Promise(function (resolve, reject) {
+      self.waitForJobStatus(jobId, resolve, reject);
     });
+
+    return p;
+  },
+
+  onCreateTempTableSuccess : function(){
+    console.log("onCreateTempTableSuccess");
+    this.popUploadProgressInfos();
+    this.pushUploadProgressInfos("<li> Successfully created temporary table. </li>");
+  },
+
+  onCreateTempTableFailure : function(error){
+    console.log("onCreateTempTableFailure");
+    this.setError(error);
+  },
+
+  uploadFile : function(){
+    console.log("uploadFile");
+    if( this.get("isLocalUpload")){
+      this.pushUploadProgressInfos("<li>Uploading file .... </li>");
+      return this.uploadTable();
+    }else{
+      return this.uploadTableFromHdfs();
+    }
+  },
+
+  waitForUploadingFile: function (data) {
+    console.log("waitForUploadingFile");
+    if (data.jobId) {
+      this.pushUploadProgressInfos("<li>Uploading file .... </li>");
+      var self = this;
+      return new Ember.RSVP.Promise(function (resolve, reject) {
+        self.waitForJobStatus(data.jobId, resolve, reject);
+      });
+    } else {
+      // no background job to poll; pass the data straight through
+      return Ember.RSVP.Promise.resolve(data);
+    }
+  },
+
+  onUploadingFileSuccess: function () {
+    console.log("onUploadingFileSuccess");
+    this.popUploadProgressInfos();
+    this.pushUploadProgressInfos("<li> Successfully uploaded file. </li>");
+  },
+
+  onUploadingFileFailure: function (error) {
+    console.log("onUploadingFileFailure");
+    this.setError(error);
+  },
+
+  insertIntoTable : function(){
+    console.log("insertIntoTable");
+
+    return this.get('uploader').insertIntoTable({
+      "fromDatabase":  this.get("databaseName"),
+      "fromTable": this.get("tempTableName"),
+      "toDatabase": this.get("databaseName"),
+      "toTable": this.get("tableName")
+    });
+  },
+
+  waitForInsertIntoTable: function (jobId) {
+    console.log("waitForInsertIntoTable");
+    this.pushUploadProgressInfos("<li>Inserting rows from temporary table to actual table .... </li>");
+    var self = this;
+    var p = new Ember.RSVP.Promise(function (resolve, reject) {
+      self.waitForJobStatus(jobId, resolve, reject);
+    });
+
+    return p;
+  },
+
+  onInsertIntoTableSuccess : function(){
+    console.log("onInsertIntoTableSuccess");
+    this.popUploadProgressInfos();
+    this.pushUploadProgressInfos("<li>Successfully inserted rows from temporary table to actual table. </li>");
+  },
+
+  onInsertIntoTableFailure : function(error){
+    console.log("onInsertIntoTableFailure");
+    this.setError(error);
+  },
+
+  deleteTempTable : function(){
+    console.log("deleteTempTable");
+
+    return this.get('uploader').deleteTable({
+      "database":  this.get("databaseName"),
+      "table": this.get("tempTableName")
+    });
+  },
+
+  waitForDeleteTempTable: function (jobId) {
+    console.log("waitForDeleteTempTable");
+    this.pushUploadProgressInfos("<li>Deleting temporary table .... </li>");
+    var self = this;
+    var p = new Ember.RSVP.Promise(function (resolve, reject) {
+      self.waitForJobStatus(jobId, resolve, reject);
+    });
+
+    return p;
+  },
+
+  onDeleteTempTableSuccess : function(){
+    console.log("onDeleteTempTableSuccess");
+    this.popUploadProgressInfos();
+    this.pushUploadProgressInfos("<li>Successfully inserted row. </li>");
+    this.onUploadSuccessfull();
+  },
+
+  onDeleteTempTableFailure : function(error){
+    console.log("onDeleteTempTableFailure");
+    this.setError(error);
+  },
+
+  createTableAndUploadFile : function(){
+    var self = this;
+    self.setError();
+    self.showUploadModal();
+    self.createActualTable()
+      .then(function(data){
+        console.log("1. received data : ", data);
+        return self.waitForCreateActualTable(data.jobId);
+      },function(error){
+        self.onCreateActualTableFailure(error);
+        console.log("Error occurred: ", error);
+        throw error;
+      })
+      .then(function(data){
+        console.log("2. received data : ", data);
+        self.onCreateActualTableSuccess(data);
+        return self.createTempTable(data);
+      },function(error){
+        if(!self.get('error')){
+          self.onCreateActualTableFailure(new Error("Server job for creation of actual table failed."));
+          console.log("Error occurred: ", error);
+        }
+        throw error;
+      })
+      .then(function(data){
+        console.log("3. received data : ", data);
+        return self.waitForCreateTempTable(data.jobId);
+      },function(error){
+        if(!self.get('error')){
+          self.onCreateTempTableFailure(error);
+          console.log("Error occurred: ", error);
+        }
+        throw error;
+      })
+      .then(function(data){
+        console.log("4. received data : ", data);
+        self.onCreateTempTableSuccess(data);
+        return self.uploadFile(data);
+      },function(error){
+        if(!self.get('error')){
+          self.onCreateTempTableFailure(new Error("Server job for creation of temporary table failed."));
+          console.log("Error occurred: ", error);
+        }
+        throw error;
+      }).then(function(data){
+        console.log("4.5 received data : ", data);
+        return self.waitForUploadingFile(data);
+      },function(error){
+        if(!self.get('error')){
+          self.onUploadingFileFailure(error);
+          console.log("Error occurred: ", error);
+        }
+        throw error;
+      })
+      .then(function(data){
+        console.log("5. received data : ", data);
+        self.onUploadingFileSuccess(data);
+        return self.insertIntoTable(data);
+      },function(error){
+        if(!self.get('error')){
+          self.onUploadingFileFailure(new Error("Server job for upload of file failed."));
+          console.log("Error occurred: ", error);
+        }
+        throw error;
+      })
+      .then(function(data){
+        console.log("6. received data : ", data);
+        return self.waitForInsertIntoTable(data.jobId);
+      },function(error){
+        if(!self.get('error')){
+          self.onInsertIntoTableFailure(error);
+          console.log("Error occurred: ", error);
+        }
+        throw error;
+      })
+      .then(function(data){
+        console.log("7. received data : ", data);
+        self.onInsertIntoTableSuccess(data);
+        return self.deleteTempTable(data);
+      },function(error){
+        if(!self.get('error')){
+          self.onInsertIntoTableFailure(new Error("Server job for insert from temporary to actual table failed."));
+          self.setError(error);
+          console.log("Error occurred: ", error);
+        }
+        throw error;
+      })
+      .then(function(data){
+        console.log("8. received data : ", data);
+        return self.waitForDeleteTempTable(data.jobId);
+      },function(error){
+        if(!self.get('error')){
+          self.onDeleteTempTableFailure(error);
+          console.log("Error occurred: ", error);
+        }
+        throw error;
+      })
+      .then(function(data){
+        console.log("9. received data : ", data);
+        self.onDeleteTempTableSuccess(data);
+      },function(error){
+        if(!self.get('error')){
+          self.onDeleteTempTableFailure(new Error("Server job for deleting temporary table failed."));
+          console.log("Error occurred: ", error);
+        }
+        throw error;
+      }).finally(function(){
+        console.log("finally hide the modal always");
+        self.hideUploadModal();
+      });
   },
 
   createTable: function () {
-    var headers = JSON.stringify(this.get('header'));
+    console.log("table headers : ", this.get('header'));
+    var headers = this.get('header');
 
     var selectedDatabase = this.get('selectedDatabase');
-    if( null == selectedDatabase || typeof selectedDatabase === 'undefined'){
-      throw new Error(constants.hive.errors.emptyDatabase);
+    if (null == selectedDatabase || typeof selectedDatabase === 'undefined') {
+      throw new Error(Ember.I18n.t('hive.errors.emptyDatabase'));
     }
 
-    this.set('databaseName',this.get('selectedDatabase').get('name'));
+    this.set('databaseName', this.get('selectedDatabase').get('name'));
     var databaseName = this.get('databaseName');
     var tableName = this.get('tableName');
     var isFirstRowHeader = this.get('isFirstRowHeader');
-    console.log("databaseName : " , databaseName, ", tableName : ", tableName, ", isFirstRowHeader : " , isFirstRowHeader , ", headers : ", headers);
+    var filetype = this.get("selectedFileType");
 
-    if( null == databaseName || typeof databaseName === 'undefined'){
-      throw new Error(constants.hive.errors.emptyDatabase);
+    if (null == databaseName || typeof databaseName === 'undefined' || databaseName == '') {
+      throw new Error(Ember.I18n.t('hive.errors.emptyDatabase'));
     }
-    if( null == tableName || typeof tableName === 'undefined'){
-      throw new Error(constants.hive.errors.emptyTableName);
+    if (null == tableName || typeof tableName === 'undefined' || tableName == '') {
+      throw new Error(Ember.I18n.t('hive.errors.emptyTableName'));
     }
-    if( null == isFirstRowHeader || typeof isFirstRowHeader === 'undefined'){
-      throw new Error(constants.hive.errors.emptyIsFirstRow);
+    if (null == isFirstRowHeader || typeof isFirstRowHeader === 'undefined') {
+      throw new Error(Ember.I18n.t('hive.errors.emptyIsFirstRow'));
     }
 
     this.validateColumns();
@@ -170,35 +554,54 @@ export default Ember.Controller.extend({
       "isFirstRowHeader": isFirstRowHeader,
       "header": headers,
       "tableName": tableName,
-      "databaseName": databaseName
+      "databaseName": databaseName,
+      "fileType":filetype
     });
   },
 
-  validateColumns: function(){
-    // TODO :check validation of columnames.
+  validateColumns: function () {
     // throw exception if invalid.
   },
-  setError: function(error){
-    this.set('error',JSON.stringify(error));
-    console.log("upload table error : ",error);
-    this.get('notifyService').error(error);
+  setError: function (error) {
+    if(error){
+      console.log("upload table error : ", error);
+      this.set('error', JSON.stringify(error));
+      this.get('notifyService').error(error);
+    }else{
+      this.set("error");
+    }
   },
 
   previewError: function (error) {
     this.setError(error);
   },
 
+  uploadTableFromHdfs : function(){
+    console.log("uploadTableFromHdfs called.");
+    if (!(this.get("inputFileTypeCSV") == true && this.get("isFirstRowHeader") == false)) {
+      this.pushUploadProgressInfos("<li>Uploading file .... </li>");
+    }
+    return this.get('uploader').uploadFromHDFS({
+      "isFirstRowHeader": this.get("isFirstRowHeader"),
+      "databaseName" : this.get('databaseName'),
+      "tableName" : this.get("tempTableName"),
+      "inputFileType" : this.get("inputFileType").id,
+      "hdfsPath" : this.get("hdfsPath")
+    });
+  },
   uploadTable: function () {
     this.printValues();
     return this.get('uploader').uploadFiles('upload', this.get('files'), {
       "isFirstRowHeader": this.get("isFirstRowHeader"),
-      "filePath": this.get('filePath')
+      "databaseName" :  this.get('databaseName'),
+      "tableName" : this.get("tempTableName"),
+      "inputFileType" : this.get("inputFileType").id
     });
   },
 
   onUploadSuccessfull: function (data) {
     console.log("onUploadSuccessfull : ", data);
-    this.get('notifyService').success( "Uploaded Successfully", "Table " + this.get('tableName') + " created in database " + this.get("databaseName"));
+    this.get('notifyService').success("Uploaded Successfully", "Table " + this.get('tableName') + " created in database " + this.get("databaseName"));
     this.clearFields();
   },
 
@@ -206,9 +609,18 @@ export default Ember.Controller.extend({
     console.log("onUploadError : ", error);
     this.setError(error);
   },
-
+  showOrHide: function () {
+    if (this.get('show') == false) {
+      this.set("displayOption", "display:none");
+      this.set("showMoreOrLess", "Show More");
+    } else {
+      this.set("displayOption", "display:table-row");
+      this.set("showMoreOrLess", "Show Less");
+    }
+  },
+  displayOption: "display:none",
   actions: {
-    toggleErrors : function(){
+    toggleErrors: function () {
       this.toggleProperty('showErrors');
     },
     filesUploaded: function (files) {
@@ -222,40 +634,22 @@ export default Ember.Controller.extend({
       var tableName = name.substr(0, i);
       this.set('tableName', tableName);
       var self = this;
-      return this.uploadForPreview(files).then(function (data) {
-        self.previewTable(data);
-      }, function (error) {
-        self.previewError(error);
-      });
+      return this.generatePreview(files);
     },
-
-    createTableAndUploadFile: function () {
-      var self = this;
-
-      try {
-        this.createTable()
-          .then(function (jobData) {
-            console.log("jobData : ", jobData);
-            self.set('filePath', jobData.filePath);
-            self.waitForResult(jobData.jobId)
-              .then(function (successStatus) {
-                console.log("successStatus : ", successStatus);
-                self.uploadTable().then(function (operationData) {
-                  console.log("operation successfull operationData : ", operationData);
-                  self.onUploadSuccessfull(operationData);
-                }, function (error) {
-                  self.onUploadError(error);
-                });
-              }, function (error) {
-                self.onUploadError(error);
-              })
-          }, function (error) {
-            self.onUploadError(error);
-          })
+    previewFromHdfs : function(){
+      return this.generatePreview();
+    },
+    uploadTable : function(){
+      try{
+        this.createTableAndUploadFile();
       }catch(e){
-        self.onUploadError(e);
+        console.log("exception occured : ", e);
+        this.setError(e);
+        this.hideUploadModal();
       }
+    },
+    uploadFromHDFS : function(){
+      this.set("isLocalUpload",false);
     }
-
   }
 });
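
createTableAndUploadFile above drives a five-step pipeline -- create the
actual table, create a temporary TEXTFILE table, upload the file into it,
insert from the temporary into the actual table, then drop the temporary
table -- polling each server job to completion before starting the next
step. The recurring pattern is a recursive status poll wrapped in an
Ember.RSVP.Promise so the steps chain with .then(); a condensed sketch
(fetchStatus stands in for the jobService call and is an assumption of this
sketch, not an API from the patch):

    import Ember from 'ember';

    // Poll a job until it reaches a terminal state.
    function waitForJob(jobId, fetchStatus) {
      return new Ember.RSVP.Promise(function (resolve, reject) {
        function poll() {
          fetchStatus(jobId).then(function (data) {
            var status = data.jobStatus;
            if (status === "Succeeded") {
              resolve(status);
            } else if (status === "Canceled" || status === "Closed" || status === "Error") {
              reject(new Error(status));
            } else {
              poll(); // job still running; ask again
            }
          }, reject);
        }
        poll();
      });
    }

Each rejection handler in the chain records the first error and re-throws,
so a failure at any step skips the remaining success handlers and still
reaches the finally() that hides the progress modal.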

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js
index bd0e6e6..46847ae 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js
@@ -201,6 +201,7 @@ TRANSLATIONS = {
     collapse: 'Collapse message',
     previousPage: 'previous',
     uploadTable: 'Upload Table',
+    showPreview: 'Preview',
     nextPage: 'next',
     loadMore: 'Load more...',
     saveHdfs: 'Save to HDFS',

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/resources/ui/hive-web/app/services/job.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/services/job.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/services/job.js
index 3a1f82a..6cb4170 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/services/job.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/services/job.js
@@ -37,5 +37,20 @@ export default Ember.Service.extend({
          job.reload();
        }
     });
+  },
+
+  fetchJobStatus: function (jobId) {
+    console.log("finding status of job : ", jobId);
+    var url = this.container.lookup('adapter:application').buildURL();
+    url +=  "/jobs/" + jobId + "/status";
+
+    return Ember.$.ajax({
+      url: url,
+      type: 'GET',
+      headers: {
+        'X-Requested-By': 'ambari'
+      }
+    });
   }
 });
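
fetchJobStatus builds the status URL from the application adapter and
returns the jqXHR from Ember.$.ajax, which is then-able, so callers can
chain it directly; the upload controller's waitForJobStatus relies on the
response carrying a jobStatus field. A usage sketch (the surrounding
controller context and the jobId variable are assumed):

    // One-shot status check from code that can reach the job service.
    this.get('jobService').fetchJobStatus(jobId).then(function (data) {
      console.log("job is now: " + data.jobStatus);
    }, function (error) {
      console.log("status request failed: ", error);
    });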

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/resources/ui/hive-web/app/styles/app.scss
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/styles/app.scss b/contrib/views/hive/src/main/resources/ui/hive-web/app/styles/app.scss
index 31dafdd..716dc02 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/styles/app.scss
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/styles/app.scss
@@ -579,7 +579,7 @@ tree-view ul li {
   margin: 0 0 10px;
 }
 
-#query-results, #upload-table  {
+#query-results, #upload-table {
   .table {
     display: inline-block;
     overflow: auto;
@@ -589,11 +589,53 @@ tree-view ul li {
     margin-top: 10px;
   }
 
-  input {
+  input, .selectize-input {
     width: 300px;
   }
 }
 
+#upload-controls {
+  .table {
+    display: inline-block;
+    overflow: auto;
+  }
+
+  input, .selectize-input {
+    width: 94px;
+    font-size : smaller;
+  }
+
+  .hidden {
+    visibility:hidden;
+  }
+
+  .visible {
+    visibility:visible;
+  }
+
+  td {
+    padding-right : 5px;
+  }
+}
+
+
+.data-upload-form tr td {
+    padding-right : 5px;
+}
+
+td.data-upload-form-label {
+    width: 150px;
+    padding-left: 50px;
+}
+td.data-upload-form-field {
+    width: 350px;
+}
+
+#hdfs-param input {
+  width: 80%;
+  display: inline;
+}
+
 #visualization{
     .max-rows {
       float: right;
@@ -609,8 +651,13 @@ tree-view ul li {
 .mozBoxSizeFix {
   -moz-box-sizing: border-box;
 }
-
-#isFirstRowHeader {
+.show-data {
+    display : block;
+}
+.hide-data {
+    display : none;
+}
+.no-shadow {
   box-shadow: none;
   width: inherit;
 }
@@ -618,3 +665,7 @@ tree-view ul li {
 .widget-controls {
   padding-right: 10px;
 }
+
+table.no-border, table.no-border tr, table.no-border tr td {
+  border: none;
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0747b6c7/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/components/input-header.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/components/input-header.hbs b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/components/input-header.hbs
new file mode 100644
index 0000000..b2349fa
--- /dev/null
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/components/input-header.hbs
@@ -0,0 +1,20 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<td {{bind-attr class="noPrecision:hidden:visible"}}>{{input type="number" placeholder="precision" class="form-control" value=column.precision }}</td>
+<td {{bind-attr class="noScale:hidden:visible"}}>{{input placeholder="scale" type="number" class="form-control" value=column.scale }}</td>
\ No newline at end of file