You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by ni...@apache.org on 2017/02/07 07:33:28 UTC

[1/2] ambari git commit: AMBARI-19872 : HiveView2.0 : added Upload CSV, JSON, XML to create table feature in the new view (nitirajrathore)

Repository: ambari
Updated Branches:
  refs/heads/trunk 239d9c1ae -> 536192bb6


http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/upload-table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/upload-table.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/upload-table.js
new file mode 100644
index 0000000..ba3260c
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/upload-table.js
@@ -0,0 +1,925 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import NewTable from './new';
+import constants from '../../../../utils/constants';
+import Column from '../../../../models/column';
+import datatypes from '../../../../configs/datatypes';
+
+export default NewTable.extend({
+  // Identifier rules: must start with a letter, then letters/digits/underscores.
+  COLUMN_NAME_REGEX: "^[a-zA-Z]{1}[a-zA-Z0-9_]*$",
+  TABLE_NAME_REGEX: "^[a-zA-Z]{1}[a-zA-Z0-9_]*$",
+  HDFS_PATH_REGEX: "^[/]{1}.+",  // unix path allows everything but here we have to mention full path so starts with /
+  init: function () {
+    this._super();
+  },
+
+  // Prefix for synthesized default column names (column1, column2, ...).
+  COLUMN_NAME_PREFIX : "column",
+  i18n : Ember.inject.service('i18n'),
+  jobService: Ember.inject.service(constants.services.jobs),
+  notifyService: Ember.inject.service(constants.services.alertMessages),
+  showErrors: false,
+  baseUrl: "/resources/upload",
+  header: null,  // header received from server
+  files: null, // files that need to be uploaded only file[0] is relevant
+  // NOTE(review): array literals in an extend() hash are shared across all
+  // instances (classic Ember gotcha) -- confirm firstRow/uploadProgressInfos
+  // are always reset per use (clearFields does reset firstRow).
+  firstRow: [], // the actual first row of the table.
+  rows: null,  // preview rows received from server
+  databaseName: null,
+  selectedDatabase: null,
+  filePath: null,
+  tableName: null,
+  uploadProgressInfos : [],
+  DEFAULT_DB_NAME : 'default',
+  showPreview : false,
+  containsEndlines: false,
+  // True when the chosen storage format is TEXTFILE; negated companion below.
+  storedAsTextFile : Ember.computed.equal("selectedFileType","TEXTFILE"),
+  storedAsNotTextFile : Ember.computed.not("storedAsTextFile"),
+  // Standard route hook; starts with the upload-progress modal hidden.
+  setupController(controller, model) {
+    this._super(controller, model);
+    this.controller.set("showUploadTableModal", false);
+  },
+  // When the second entry of 'fileTypes' is selected, containsEndlines is
+  // forced off (endlines handling presumably does not apply to that type
+  // -- TODO confirm against the fileTypes config).
+  onChangeSelectedFileType: function(){
+    if(this.get('selectedFileType') === this.get('fileTypes')[1] && this.get('containsEndlines') === true){
+      this.set('containsEndlines', false);
+    }
+  }.observes("selectedFileType", "containsEndlines"),
+  // Adapter that performs all upload/create/delete REST calls for this route.
+  getUploader(){
+    return this.get('store').adapterFor('upload-table');
+  },
+  // Switching between local-file and HDFS upload starts from a clean slate.
+  onChangeUploadSource : function(){
+    this.clearFields();
+  }.observes("uploadSource"),
+  showCSVFormatInput: false,
+  // Concatenates all progress messages into one HTML-safe string for display.
+  uploadProgressInfo : Ember.computed("uploadProgressInfos.[]",function(){
+    var info = "";
+    for( var i = 0 ; i < this.get('uploadProgressInfos').length ; i++)
+      info += this.get('uploadProgressInfos').objectAt(i);
+
+    return new Ember.Handlebars.SafeString(info);
+  }),
+  _setHeaderElements : function(header,valueArray){
+    header.forEach(function (item, index) {
+      Ember.set(item, 'name',  valueArray[index]);
+    }, this);
+  },
+  // Re-shapes the preview when the "first row is header" checkbox flips:
+  // unchecked -> the saved first row is pushed back into the data rows and
+  // headers get default names; checked -> the first row becomes the header
+  // labels and is removed from the data rows.
+  isFirstRowHeaderDidChange: function () {
+    if (this.get('isFirstRowHeader') != null && typeof this.get('isFirstRowHeader') !== 'undefined') {
+      if (this.get('isFirstRowHeader') == false) {
+        if (this.get('rows')) {
+          this.get('rows').unshiftObject({row: this.get('firstRow')});
+          this._setHeaderElements(this.get('header'),this.get('defaultColumnNames'));
+        }
+      } else if (this.get('header')) { // headers are available
+        // take first row of
+        this._setHeaderElements(this.get('header'),this.get('firstRow'));
+        this.get('rows').removeAt(0);
+      }
+
+      this.printValues();
+    }
+  }.observes('isFirstRowHeader'),
+
+  // NOTE(review): progress entries are no longer popped individually; the
+  // modal shows a single message (see pushUploadProgressInfos). Kept as a
+  // no-op so the many call sites remain valid.
+  popUploadProgressInfos: function () {
+    // var msg = this.get('uploadProgressInfos').popObject();
+  },
+
+  // Replaces the modal's current message and makes sure the modal is shown.
+  pushUploadProgressInfos : function(info){
+    this.controller.set("uploadTableMessage", info);
+    this.showUploadModal();
+    // this.get('uploadProgressInfos').pushObject(info);
+  },
+  // Drains accumulated progress entries (each pop is currently a no-op).
+  clearUploadProgressModal : function(){
+    var len = this.get('uploadProgressInfos').length;
+    for( var i = 0 ; i < len ; i++){
+      this.popUploadProgressInfos();
+    }
+  },
+
+  // Hides the progress modal and clears any queued progress messages.
+  hideUploadModal : function(){
+    this.controller.set("showUploadTableModal", false);
+    this.clearUploadProgressModal();
+  },
+
+  showUploadModal : function(){
+    this.controller.set("showUploadTableModal", true);
+  },
+
+  // Resets all wizard state so a new upload starts clean. A single-argument
+  // set() clears the property to undefined.
+  clearFields: function () {
+    this.set("showPreview",false);
+    this.set("hdfsPath");
+    this.set("header");
+    this.set("rows");
+    this.set("escapedBy");
+    this.set("fieldsTerminatedBy");
+    this.set("error");
+    this.set('files');
+    this.set("firstRow");
+    this.set("selectedDatabase",null);
+    this.set("databaseName");
+    this.set("filePath");
+    this.set('tableName');
+    this.clearUploadProgressModal();
+    this.printValues();
+  },
+
+  // Debug helper: dumps the key pieces of preview state to the console.
+  printValues: function () {
+    console.log("header : ", this.get('header'),
+      ". rows : ",this.get('rows'),". error : ", this.get('error'),
+      " isFirstRowHeader : ", this.get('isFirstRowHeader'),
+      "firstRow : ", this.get('firstRow'));
+  },
+
+  generateTempTableName: function () {
+    var text = "";
+    var possible = "abcdefghijklmnopqrstuvwxyz";
+
+    for (var i = 0; i < 30; i++)
+      text += possible.charAt(Math.floor(Math.random() * possible.length));
+
+    return text;
+  },
+
+  waitForJobStatus: function (jobId, resolve, reject) {
+    console.log("finding status of job: ", jobId);
+    var self = this;
+    var fetchJobPromise = this.get('jobService').getJob(jobId);
+    fetchJobPromise.then(function (data) {
+      console.log("waitForJobStatus : data : ", data);
+      var job = JSON.parse(JSON.stringify(data));
+      var status = job.status;
+      if (status == constants.statuses.succeeded ) {
+        console.log("resolving waitForJobStatus with : " , status);
+        resolve(job);
+      } else if (status == constants.statuses.canceled || status == constants.statuses.closed || status == constants.statuses.error) {
+        console.log("rejecting waitForJobStatus with : " + status);
+        reject(new Error(job.statusMessage));
+      } else {
+        Ember.run.later(function(){
+          console.log("retrying waitForJobStatus : ", jobId);
+          self.waitForJobStatus(jobId, resolve, reject);
+        }, 2000);
+      }
+    }, function (error) {
+      console.log("rejecting waitForJobStatus with : " + error);
+      reject(error);
+    })
+  },
+
+  // Uploads the chosen local file to the 'preview' endpoint along with the
+  // CSV parsing options; resolves with the preview header/rows payload.
+  uploadForPreview: function (sourceObject) {
+    console.log("uploaderForPreview called.");
+    let files = sourceObject.get("fileInfo.files");
+    let csvParams = sourceObject.get("fileFormatInfo.csvParams");
+
+    return this.getUploader().uploadFiles('preview', files, {
+      "isFirstRowHeader": sourceObject.get("isFirstRowHeader"),
+      "inputFileType": sourceObject.get("fileFormatInfo.inputFileType").id,
+      "csvDelimiter": csvParams.get("csvDelimiter").name,
+      "csvEscape": csvParams.get("csvEscape").name,
+      "csvQuote": csvParams.get("csvQuote").name
+    });
+  },
+
+  // Resolves the property named by 'key' to its selected character: the
+  // option's 'id' is treated as an ASCII code, with -1 as the "none"
+  // sentinel. Returns null when key/value is absent or the sentinel is set.
+  getAsciiChar : function(key){
+    if(!key){
+      return null;
+    }
+
+    var value = this.get(key);
+    if(value && value.id != -1) {
+      return String.fromCharCode(value.id);
+    }else{
+      return null;
+    }
+  },
+  // Resolves delimiter/quote/escape characters, falling back to configured
+  // defaults when no character was chosen. The extra "!= 0" guard keeps a
+  // zero-valued selection from being mistaken for "unset".
+  getCSVParams : function(){
+    var csvd = this.getAsciiChar('csvDelimiter');
+    if(!csvd && csvd != 0) csvd = this.get('DEFAULT_CSV_DELIMITER');
+
+    var csvq = this.getAsciiChar('csvQuote');
+    if(!csvq && csvq != 0) csvq = this.get('DEFAULT_CSV_QUOTE');
+
+    var csve = this.getAsciiChar('csvEscape');
+    if(!csve && csve != 0) csve = this.get('DEFAULT_CSV_ESCAPE');
+
+    return {"csvDelimiter": csvd, "csvQuote" : csvq, "csvEscape": csve};
+  },
+
+  uploadForPreviewFromHDFS: function (sourceObject) {
+    console.log("uploadForPreviewFromHDFS called.");
+    // this.validateHDFSPath(hdfsPath);
+    var self = sourceObject;
+    var hdfsPath = sourceObject.get("fileInfo.hdfsPath");
+    var csvParams = sourceObject.get("fileFormatInfo.csvParams");
+
+    return this.getUploader().previewFromHDFS({
+      "isFirstRowHeader": sourceObject.get("fileFormatInfo.isFirstRowHeader"),
+      "inputFileType": sourceObject.get("fileFormatInfo.inputFileType").id,
+      "hdfsPath": hdfsPath,
+      "csvDelimiter": csvParams.get("csvDelimiter").name,
+      "csvEscape": csvParams.get("csvEscape").name,
+      "csvQuote": csvParams.get("csvQuote").name
+    });
+  },
+
+  // Generates the preview for either upload source: shows the progress
+  // modal, delegates to the local/HDFS preview call, and routes the result
+  // to the success/failure handlers.
+  // NOTE(review): the outer finally{} runs synchronously as soon as the
+  // promise is returned, so hideUploadModal() fires before the preview
+  // completes; the promise's own finally hides it again later. Confirm the
+  // early hide is intended.
+  generatePreview: function (sourceObject) {
+    var self = this;
+    var promise = null;
+    try {
+      this.waitForGeneratingPreview();
+      if (sourceObject.get('fileInfo.uploadSource') === "local" ) {
+        promise = this.uploadForPreview(sourceObject);
+      } else {
+        promise = this.uploadForPreviewFromHDFS(sourceObject);
+      }
+
+      return promise.then(function (data) {
+        self.onGeneratePreviewSuccess(data);
+      }, function (error) {
+        self.onGeneratePreviewFailure(error);
+      }).catch(function (error) {
+        console.log("inside catch : ", error);
+      }).finally(function () {
+        console.log("finally hide the modal always after preview.");
+        self.hideUploadModal();
+      });
+    }catch(e){
+      // exception before promise will be caught here.
+      console.log("exception before promise : ", e);
+      self.setError(e);
+    }finally{
+      console.log("finally hide the modal always after preview.");
+      self.hideUploadModal();
+    }
+  },
+
+  // Opens the progress modal with the "generating preview" message.
+  waitForGeneratingPreview: function () {
+    console.log("waitForGeneratingPreview");
+    this.showUploadModal();
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.generatingPreview'))
+  },
+
+  // Applies a preview response to route and controller state: synthesizes
+  // default column names, records header/rows, and captures the literal
+  // first row so the header checkbox can be toggled later.
+  previewTable: function (data) {
+    console.log('inside previewTable. data : ', data);
+    var self = this;
+    var defaultColumnNames = data.header.map(function(item,index){
+      return { "name": self.COLUMN_NAME_PREFIX + (index + 1) }
+    });
+    this.set("defaultColumnNames",defaultColumnNames);
+    this.set("previewData", data);
+    this.set("header", this.get("previewData.header"));
+    this.set('isFirstRowHeader', this.get("previewData.isFirstRowHeader"));
+    this.set('tableName', this.get("previewData.tableName"));
+    if (data.isFirstRowHeader == true) {
+      // Server consumed the first data row as the header itself.
+      this.set("firstRow", this.get("previewData.header"));
+    }else {
+      if(data.rows.length > 0){
+        this.set("firstRow", this.get("previewData.rows")[0].row);
+      }else{
+        this.set("firstRow", Ember.A());
+      }
+    }
+    this.set("rows", this.get("previewData.rows"));
+    this.controller.set('tableName', this.get("previewData.tableName"));
+    this.controller.set("rows", this.get("previewData.rows"));
+    this.controller.set("columns", this.transformToColumnModelList(this.get("previewData.header")));
+  },
+
+  // Maps raw header entries to Column models; empty Ember array when absent.
+  transformToColumnModelList : function(columns){
+    var _this = this;
+    if(columns){
+      return columns.map(function(column){
+        return _this.transformToColumnModel(column);
+      });
+    }
+    return Ember.A();
+  },
+
+  // Builds an editable Column model, resolving the type by its label.
+  transformToColumnModel: function (column) {
+    return Column.create({
+      name: column.name,
+      type: datatypes.findBy("label", column.type),
+      editing: true
+    })
+  },
+  // Preview succeeded: reveal the preview section and render the data.
+  onGeneratePreviewSuccess: function (data) {
+    console.log("onGeneratePreviewSuccess");
+    this.set("showPreview",true);
+    this.hideUploadModal();
+    this.previewTable(data);
+  },
+
+  // Preview failed: keep the preview hidden and surface the error.
+  onGeneratePreviewFailure: function (error) {
+    console.log("onGeneratePreviewFailure");
+    this.set("showPreview",false);
+    this.hideUploadModal();
+    this.setError(error);
+  },
+
+  createActualTable: function (tableData) {
+    console.log("createActualTable");
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.startingToCreateActualTable'));
+    var retValue = this.createTable(tableData.get("tableMeta"));
+    return retValue;
+    // var self = this;
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.startingToCreateActualTable'));
+    // var headers = this.get('header');
+    // var selectedDatabase = this.get('selectedDatabase');
+    // if (!selectedDatabase) {
+    //   throw new Error(this.translate('hive.errors.emptyDatabase', {database : this.translate("hive.words.database")}));
+    // }
+    //
+    // this.set('databaseName', this.get('selectedDatabase.id'));
+    // var databaseName = this.get('databaseName');
+    // var tableName = this.get("tableMeta").name;
+    // var isFirstRowHeader = this.get('isFirstRowHeader');
+    // var filetype = this.get("selectedFileType");
+    //
+    // this.validateInput(headers,tableName,databaseName,isFirstRowHeader);
+    // this.showUploadModal();
+    // var rowFormat = this.getRowFormat();
+    // return this.getUploader().createTable({
+    //   "isFirstRowHeader": isFirstRowHeader,
+    //   "header": headers,
+    //   "tableName": tableName,
+    //   "databaseName": databaseName,
+    //   "hiveFileType":filetype,
+    //   "rowFormat": { "fieldsTerminatedBy" : rowFormat.fieldsTerminatedBy, "escapedBy" : rowFormat.escapedBy}
+    // });
+  },
+  getRowFormat : function(){
+    var fieldsTerminatedBy = this.getAsciiChar('fieldsTerminatedBy');
+    var escapedBy = this.getAsciiChar('escapedBy');
+    return {"fieldsTerminatedBy": fieldsTerminatedBy, "escapedBy" : escapedBy};
+  },
+  // Updates the progress message and waits for the create-actual-table job
+  // to reach a terminal state.
+  waitForCreateActualTable: function (jobId) {
+    console.log("waitForCreateActualTable");
+    this.popUploadProgressInfos();
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.waitingToCreateActualTable'));
+    var self = this;
+    var p = new Ember.RSVP.Promise(function (resolve, reject) {
+      self.waitForJobStatus(jobId, resolve, reject);
+    });
+
+    return p;
+  },
+  onCreateActualTableSuccess: function () {
+    console.log("onCreateTableSuccess");
+    this.popUploadProgressInfos();
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.successfullyCreatedActualTable'));
+  },
+  // Reports the failure and records the error for display.
+  onCreateActualTableFailure: function (error) {
+    console.log("onCreateActualTableFailure");
+    this.popUploadProgressInfos();
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.failedToCreateActualTable'));
+    this.setError(error);
+  },
+  createTempTable: function (tableData) {
+    let tableMeta = JSON.parse(JSON.stringify(tableData.get("tableMeta")));
+    // manually copy the columns as they are missing members when copying
+    let columns = tableData.get("tableMeta").columns.map(function(col){
+      return col.copy();
+    });
+    tableMeta.columns = columns;
+
+    console.log("tableMeta : ", tableMeta);
+
+    var self = this;
+    console.log("createTempTable");
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.startingToCreateTemporaryTable'));
+    var tempTableName = this.generateTempTableName();
+    tableMeta.name = tempTableName;
+
+    var headers = tableMeta.columns.map(function(column){
+      if(tableData.fileFormatInfo.containsEndlines){
+        column.type.label = "STRING";
+        delete column.scale;
+        delete column.precision;
+      }
+      return column;
+    });
+
+    tableMeta.columns = headers;
+    tableMeta.settings = {};
+    tableMeta.properties = [];
+    tableMeta.settings.fileFormat = {};
+    tableMeta.settings.fileFormat.type = "TEXTFILE";
+    this.set("tableData.tempTableMeta", tableMeta);
+    return this.createTable(tableMeta);
+    // return this.getUploader().createTable({
+    //   "isFirstRowHeader": this.get("isFirstRowHeader"),
+    //   "header": headers,
+    //   "tableName": tempTableName,
+    //   "databaseName": this.get('databaseName'),
+    //   "hiveFileType":"TEXTFILE",
+    //   "rowFormat": { "fieldsTerminatedBy" : parseInt('1', 10), "escapedBy" : null}
+    // });
+  },
+
+  // Updates the progress message and waits for the create-temp-table job
+  // to reach a terminal state.
+  waitForCreateTempTable: function (jobId) {
+    console.log("waitForCreateTempTable");
+    this.popUploadProgressInfos();
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.waitingToCreateTemporaryTable'));
+    var self = this;
+    var p = new Ember.RSVP.Promise(function (resolve, reject) {
+      self.waitForJobStatus(jobId, resolve, reject);
+    });
+
+    return p;
+  },
+
+  onCreateTempTableSuccess: function () {
+    console.log("onCreateTempTableSuccess");
+    this.popUploadProgressInfos();
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.successfullyCreatedTemporaryTable'));
+  },
+
+  deleteTable : function(databaseName, tableName){
+    console.log("deleting table ", databaseName , "." , tableName);
+    return this.getUploader().deleteTable({
+      "database":  databaseName,
+      "table": tableName
+    });
+  },
+
+  deleteTableOnError: function (databaseName, tableName, tableLabel) {
+    //delete table and wait for delete job
+    var self = this;
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.deletingTable',{table:tableLabel}));
+
+    return this.deleteTable(databaseName, tableName).then(function (job) {
+      return new Ember.RSVP.Promise(function (resolve, reject) {
+        self.waitForJobStatus(job.id, resolve, reject);
+      });
+    }).then(function () {
+      self.popUploadProgressInfos();
+      self.pushUploadProgressInfos(this.formatMessage('hive.messages.succesfullyDeletedTable',{table:tableLabel}));
+      return Ember.RSVP.Promise.resolve();
+    }, function (err) {
+      self.popUploadProgressInfos();
+      self.pushUploadProgressInfos(this.formatMessage('hive.messages.failedToDeleteTable',{table:tableLabel}));
+      self.setError(err);
+      return Ember.RSVP.Promise.reject();
+    });
+  },
+
+  // Drops the just-created actual table after a later step failed.
+  // NOTE(review): reads this.get("database")/this.get("tableMeta"), but the
+  // metadata is stored under "tableData" (see createTableAndUploadFile) --
+  // verify these paths resolve as intended.
+  rollBackActualTableCreation : function(){
+    return this.deleteTableOnError(this.get("database"),this.get("tableMeta").name,this.translate('hive.words.actual'));
+  },
+
+  // i18n lookup helper; formatMessage is an alias kept for readability at
+  // the progress-reporting call sites.
+  translate : function(str,vars){
+    return this.get('i18n').t(str,vars);
+  },
+  formatMessage : function(messageId, vars){
+    return this.translate(messageId, vars);
+  },
+  // Records the error, reports the failure, rolls back the actual table and
+  // rejects in all cases so the surrounding promise chain stops.
+  onCreateTempTableFailure : function(error){
+    console.log("onCreateTempTableFailure");
+    this.setError(error);
+    this.popUploadProgressInfos();
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.failedToCreateTemporaryTable'));
+    return this.rollBackActualTableCreation().then(function(data){
+      return Ember.RSVP.Promise.reject(error); // always reject for the flow to stop
+    }, function (err) {
+      return Ember.RSVP.Promise.reject(error); // always reject for the flow to stop
+    });
+  },
+
+  uploadFile: function (tableData) {
+    console.log("uploadFile");
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.startingToUploadFile'));
+    let uploadSource = tableData.get("fileInfo").get("uploadSource");
+    if(uploadSource === "local"){
+      return this.uploadTable(tableData);
+    }else{
+      return this.uploadTableFromHdfs(tableData);
+    }
+  },
+
+  // Waits for the upload job when the response carries a jobId; otherwise
+  // the upload already completed and the response is passed through.
+  waitForUploadingFile: function (data) {
+    console.log("waitForUploadingFile");
+    this.popUploadProgressInfos();
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.waitingToUploadFile'));
+    if( data.jobId ){
+      var self = this;
+      var p = new Ember.RSVP.Promise(function (resolve, reject) {
+        self.waitForJobStatus(data.jobId, resolve, reject);
+      });
+      return p;
+    }else{
+      return  Ember.RSVP.Promise.resolve(data);
+    }
+  },
+
+  onUploadingFileSuccess: function () {
+    console.log("onUploadingFileSuccess");
+    this.popUploadProgressInfos();
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.successfullyUploadedFile') );
+  },
+
+  // Rolls back the temporary table, then always rolls back the actual table
+  // too, whether or not the first rollback succeeded.
+  // NOTE(review): reads this.get("tempTableMeta") but the temp meta is
+  // stored under "tableData.tempTableMeta" -- verify this path resolves.
+  rollBackTempTableCreation: function () {
+    var self = this;
+    return this.deleteTableOnError(this.get("database"),this.get("tempTableMeta").name,this.translate('hive.words.temporary')).then(function(data){
+      return self.rollBackActualTableCreation();
+    },function(err){
+      return self.rollBackActualTableCreation();
+    })
+  },
+
+  // Records the failure, rolls back both tables, and rejects in all cases
+  // so the surrounding promise chain stops.
+  onUploadingFileFailure: function (error) {
+    console.log("onUploadingFileFailure");
+    this.setError(error);
+    this.popUploadProgressInfos();
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.failedToUploadFile'));
+    return this.rollBackTempTableCreation().then(function(data){
+      return Ember.RSVP.Promise.reject(error); // always reject for the flow to stop
+    },function(err){
+      return Ember.RSVP.Promise.reject(error); // always reject for the flow to stop
+    });
+  },
+
+  rollBackUploadFile : function(){
+    return this.rollBackTempTableCreation();
+  },
+
+  // Asks the server to insert rows from the temp table into the target
+  // table, mapping each column's type object to its plain label. unhexInsert
+  // mirrors the endlines handling chosen at upload time.
+  insertIntoTable : function(tableData){
+    console.log("insertIntoTable");
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.startingToInsertRows'));
+
+    let headers = tableData.get("tableMeta").columns.map(function(column){
+        var header = JSON.parse(JSON.stringify(column));
+        header.type = column.type.label;
+        return header;
+    });
+
+    return this.getUploader().insertIntoTable({
+      "fromDatabase": tableData.get("database"),
+      "fromTable": tableData.get("tempTableMeta").name,
+      "toDatabase": tableData.get("database"),
+      "toTable": tableData.get("tableMeta").name,
+      "header": headers,
+      "unhexInsert": tableData.fileFormatInfo.containsEndlines
+    });
+  },
+
+  // Updates the progress message and waits for the insert job to finish.
+  waitForInsertIntoTable: function (jobId) {
+    console.log("waitForInsertIntoTable");
+    this.popUploadProgressInfos();
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.waitingToInsertRows'));
+    var self = this;
+    var p = new Ember.RSVP.Promise(function (resolve, reject) {
+      self.waitForJobStatus(jobId, resolve, reject);
+    });
+
+    return p;
+  },
+
+  onInsertIntoTableSuccess: function () {
+    console.log("onInsertIntoTableSuccess");
+    this.popUploadProgressInfos();
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.successfullyInsertedRows'));
+  },
+
+  // Records the failure, rolls back both tables, and rejects in all cases
+  // so the surrounding promise chain stops.
+  onInsertIntoTableFailure: function (error) {
+    console.log("onInsertIntoTableFailure");
+    this.setError(error);
+    this.popUploadProgressInfos();
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.failedToInsertRows'));
+    return this.rollBackUploadFile().then(function(data){
+      return Ember.RSVP.Promise.reject(error); // always reject for the flow to stop
+    },function(err){
+      return Ember.RSVP.Promise.reject(error); // always reject for the flow to stop
+    });
+  },
+  // Final cleanup: drops the temporary staging table after the insert.
+  deleteTempTable : function(tableData){
+    console.log("deleteTempTable");
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.startingToDeleteTemporaryTable'));
+
+    return this.deleteTable(
+      tableData.get("database"),
+      tableData.get("tempTableMeta").name
+    );
+  },
+  // Updates the progress message and waits for the drop job to finish.
+  waitForDeleteTempTable: function (jobId) {
+    console.log("waitForDeleteTempTable");
+    this.popUploadProgressInfos();
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.waitingToDeleteTemporaryTable'));
+    var self = this;
+    var p = new Ember.RSVP.Promise(function (resolve, reject) {
+      self.waitForJobStatus(jobId, resolve, reject);
+    });
+
+    return p;
+  },
+  // Temp table dropped -- the whole upload flow is complete.
+  onDeleteTempTableSuccess: function () {
+    console.log("onDeleteTempTableSuccess");
+    this.popUploadProgressInfos();
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.successfullyDeletedTemporaryTable'));
+    this.onUploadSuccessfull();
+  },
+  // NOTE(review): the second setError overwrites the original error with the
+  // manual-cleanup instruction -- confirm both messages shouldn't be shown.
+  onDeleteTempTableFailure: function (error) {
+    console.log("onDeleteTempTableFailure");
+    this.setError(error);
+    this.setError(this.formatMessage('hive.messages.manuallyDeleteTable',{databaseName:this.get('databaseName'), tableName: this.get("tempTableName")}));
+  },
+  validateHDFSPath: function (hdfsPath) {
+    if (null == hdfsPath || hdfsPath == "") throw new Error(this.translate('hive.errors.emptyHdfsPath'));
+    var hdfsRegex = new RegExp(this.get("HDFS_PATH_REGEX"), "g");
+    var mArr = hdfsPath.match(hdfsRegex);
+    if (mArr == null || mArr.length != 1) throw new Error(this.translate('hive.errors.illegalHdfPath', {"hdfsPath": hdfsPath} ));
+  },
+  // Orchestrates the full upload flow as one promise chain:
+  //   create actual table -> create temp (TEXTFILE) table -> upload file
+  //   into temp -> INSERT into actual -> drop temp.
+  // Each failure handler reports once (guarded by this.get('error') so the
+  // same error isn't reported at every later stage) and rethrows to stop
+  // the chain; the trailing catch/finally log and hide the progress modal.
+  createTableAndUploadFile: function (tableData) {
+    let databaseModel = this.controllerFor('databases.database').get('model');
+    let database = databaseModel.get('name');
+    tableData.set("database", database);
+    this.set("tableData", tableData);
+
+    var self = this;
+    self.setError();
+    self.createActualTable(tableData)
+      .then(function(job){
+        console.log("1. received job : ", job);
+        return self.waitForCreateActualTable(job.id);
+      },function(error){
+        console.log("Error occurred: ", error);
+        self.onCreateActualTableFailure(error);
+        throw error;
+      })
+      .then(function(data){
+        self.onCreateActualTableSuccess(data);
+        return self.createTempTable(tableData);
+      },function(error){
+        if(!self.get('error')){
+          console.log("Error occurred: ", error);
+          self.onCreateActualTableFailure(error);
+        }
+        throw error;
+      })
+      .then(function(job){
+        return self.waitForCreateTempTable(job.id);
+      },function(error){
+        if(!self.get('error')){
+          console.log("Error occurred: ", error);
+          return self.onCreateTempTableFailure(error);
+        }
+        throw error;
+      })
+      .then(function(data){
+        self.onCreateTempTableSuccess(data);
+        return self.uploadFile(tableData);
+      },function(error){
+        if(!self.get('error')){
+          console.log("Error occurred: ", error);
+          return self.onCreateTempTableFailure(error);
+        }
+        throw error;
+      }).then(function(data){
+      return self.waitForUploadingFile(data);
+    },function(error){
+      if(!self.get('error')){
+        console.log("Error occurred: ", error);
+        return self.onUploadingFileFailure(error);
+      }
+      throw error;
+    })
+      .then(function(data){
+        self.onUploadingFileSuccess(data);
+        return self.insertIntoTable(tableData);
+      },function(error){
+        if(!self.get('error')){
+          console.log("Error occurred: ", error);
+          return self.onUploadingFileFailure(error);
+        }
+        throw error;
+      })
+      .then(function(job){
+        return self.waitForInsertIntoTable(job.id);
+      },function(error){
+        if(!self.get('error')){
+          console.log("Error occurred: ", error);
+          return self.onInsertIntoTableFailure(error);
+        }
+        throw error;
+      })
+      .then(function(data){
+        self.onInsertIntoTableSuccess(data);
+        return self.deleteTempTable(tableData);
+      },function(error){
+        if(!self.get('error')){
+          console.log("Error occurred: ", error);
+          return self.onInsertIntoTableFailure(error);
+        }
+        throw error;
+      })
+      .then(function(job){
+        return self.waitForDeleteTempTable(job.id);
+      },function(error){
+        if(!self.get('error')){
+          console.log("Error occurred: ", error);
+          self.onDeleteTempTableFailure(error);
+        }
+        throw error;
+      })
+      .then(function(data){
+        self.onDeleteTempTableSuccess(data);
+      },function(error){
+        if(!self.get('error')){
+          console.log("Error occurred: ", error);
+          self.onDeleteTempTableFailure(error);
+        }
+        throw error;
+      })
+      .catch(function(error){
+        console.log("inside catch : ", error);
+      })
+      .finally(function(){
+        console.log("finally hide the modal always");
+        self.hideUploadModal();
+      });
+  },
+  // Validates the user's table definition before any job is submitted;
+  // throws a translated Error on the first violation found (empty headers,
+  // illegal column/table names, missing database, unset header flag).
+  validateInput: function (headers,tableName,databaseName,isFirstRowHeader) {
+    // throw exception if invalid.
+    if(!headers || headers.length == 0) throw new Error(this.translate('hive.errors.emptyHeaders'));
+
+    var regex = new RegExp(this.get("COLUMN_NAME_REGEX"),"g");
+
+    headers.forEach(function(column,index){
+      if( !column  ) throw new Error(this.translate('hive.errors.emptyColumnName'));
+      var matchArr = column.name.match(regex);
+      if(matchArr == null || matchArr.length != 1 ) throw new Error(this.translate('hive.errors.illegalColumnName',{ columnName : column.name, index : (index + 1)}));
+    },this);
+
+    if(!tableName) throw new Error(this.translate('hive.errors.emptyTableName', {tableNameField : this.translate('hive.ui.tableName')}));
+    var tableRegex = new RegExp(this.get("TABLE_NAME_REGEX"),"g");
+    var mArr = tableName.match(tableRegex);
+    if(mArr == null || mArr.length != 1 ) throw new Error(this.translate('hive.errors.illegalTableName', {tableNameField:this.translate('hive.ui.tableName'),tableName:tableName}) );
+
+    if(!databaseName) throw new Error(this.translate('hive.errors.emptyDatabase', {database:this.translate('hive.words.database')}));
+
+    if (null == isFirstRowHeader || typeof isFirstRowHeader === 'undefined') { //this can be true or false. so explicitly checking for null/ undefined.
+      throw new Error(this.translate('hive.errors.emptyIsFirstRow', {isFirstRowHeaderField:this.translate('hive.ui.isFirstRowHeader')}));
+    }
+  },
+  // Stores a JSON-serialized copy of the error for display; calling with no
+  // argument clears the error (single-argument set() -> undefined).
+  setError: function (error) {
+    if(error){
+      console.log(" error : ", error);
+      this.set('error', JSON.stringify(error));
+      // this.get('notifyService').warn(error);
+      // TODO : add notifyService warn message.
+      console.log("TODO : add notifyService warn message.");
+    }else{
+      this.set("error");
+    }
+  },
+  previewError: function (error) {
+    this.setError(error);
+  },
+  // Asks the server to load a file already in HDFS into the temp table,
+  // sending column names/types and the CSV parsing options.
+  // NOTE(review): the message id 'uploadingFromHdfs' lacks the
+  // 'hive.messages.' prefix every sibling formatMessage call uses -- verify
+  // this translation key actually exists.
+  uploadTableFromHdfs : function(tableData){
+    console.log("uploadTableFromHdfs called.");
+    // if(!(this.get("inputFileTypeCSV") == true && this.get("isFirstRowHeader") == false) ){
+      this.pushUploadProgressInfos(this.formatMessage('uploadingFromHdfs'));
+    // }
+    var csvParams = tableData.get("fileFormatInfo.csvParams");
+    let columns = tableData.get("tableMeta").columns.map(function(column){
+      return {"name": column.get("name"), "type": column.get("type.label")};
+    });
+    let header = columns; //JSON.stringify(columns);
+
+    return this.getUploader().uploadFromHDFS({
+      "isFirstRowHeader": tableData.get("fileFormatInfo.isFirstRowHeader"),
+      "databaseName": tableData.get("database"),
+      "tableName": tableData.get("tempTableMeta").name,
+      "inputFileType": tableData.get("fileFormatInfo.inputFileType").id,
+      "hdfsPath": tableData.get("fileInfo.hdfsPath"),
+      "header": header,
+      "containsEndlines": tableData.get("fileFormatInfo.containsEndlines"),
+      "csvDelimiter": csvParams.get("csvDelimiter").name,
+      "csvEscape": csvParams.get("csvEscape").name,
+      "csvQuote": csvParams.get("csvQuote").name
+    });
+  },
+  // Uploads the locally selected file(s) to create/populate the table via the
+  // uploader service. Returns the promise produced by uploadFiles.
+  // NOTE(review): here the header is JSON-stringified, whereas the HDFS path
+  // sends it as a raw array — confirm the backend accepts both shapes.
+  uploadTable: function (tableData) {
+    this.printValues();
+    var csvParams = tableData.get("fileFormatInfo.csvParams");
+    let columns = tableData.get("tableMeta").columns.map(function(column){
+      return {"name": column.get("name"), "type": column.get("type.label")};
+    });
+    let header = JSON.stringify(columns);
+    return this.getUploader().uploadFiles('upload', tableData.get("fileInfo.files"), {
+      "isFirstRowHeader": tableData.get("fileFormatInfo.isFirstRowHeader"),
+      "databaseName" :  tableData.get("database"),
+      "tableName" : tableData.get("tempTableMeta").name,
+      "inputFileType" : tableData.get("fileFormatInfo.inputFileType").id,
+      "header": header,
+      "containsEndlines": tableData.get("fileFormatInfo.containsEndlines"),
+      "csvDelimiter": csvParams.get("csvDelimiter").name,
+      "csvEscape": csvParams.get("csvEscape").name,
+      "csvQuote": csvParams.get("csvQuote").name
+    });
+  },
+
+  // Success callback for the upload flow: transitions the user to the newly
+  // created table and resets the wizard fields. (Name spelling is kept as-is
+  // since callers elsewhere reference "onUploadSuccessfull".)
+  onUploadSuccessfull: function (data) {
+    console.log("onUploadSuccessfull : ", data);
+    this._transitionToCreatedTable(this.get("tableData").get('database'), this.get("tableData").get('tableMeta').name);
+
+    // this.get('notifyService').success(this.translate('hive.messages.successfullyUploadedTableHeader'),
+    //   this.translate('hive.messages.successfullyUploadedTableMessage' ,{tableName:this.get("tableData").get("tableMeta").name ,databaseName:this.get("tableData").get("database")}));
+    this.clearFields();
+  },
+
+  // Failure callback for the upload flow; records the error for display.
+  onUploadError: function (error) {
+    console.log("onUploadError : ", error);
+    this.setError(error);
+  },
+  // Syncs the inline display style and the "Show More"/"Show Less" label
+  // with the current value of the `show` flag.
+  showOrHide: function () {
+    if (this.get('show') == false) {
+      this.set("displayOption", "display:none");
+      this.set("showMoreOrLess", "Show More");
+    } else {
+      this.set("displayOption", "display:table-row");
+      this.set("showMoreOrLess", "Show Less");
+    }
+  },
+
+  // Inline style applied to the optional detail rows; hidden by default.
+  displayOption: "display:none",
+  actions: {
+  // Expands/collapses the CSV format parameter inputs.
+  toggleCSVFormat: function() {
+    console.log("inside toggleCSVFormat");
+    this.toggleProperty('showCSVFormatInput');
+  },
+  // Closes the CSV input-parameters modal.
+  hideInputParamModal : function(){
+      Ember.$("#inputParamsModal").modal("hide");
+    },
+    // Opens the input-parameters modal, but only when the selected input
+    // file type is CSV.
+    showInputParamModal : function(){
+      if(this.get('inputFileTypeCSV')){
+        Ember.$("#inputParamsModal").modal("show");
+      }
+    },
+    // Closes the row-format modal.
+    hideRowFormatModal : function(){
+      Ember.$("#rowFormatModal").modal("hide");
+    },
+    // Opens the row-format modal, but only for "stored as text file" tables.
+    showRowFormatModal : function(){
+      if(this.get('storedAsTextFile')) {
+        Ember.$("#rowFormatModal").modal("show");
+      }
+    },
+    // Expands/collapses the error details section.
+    toggleErrors: function () {
+      this.toggleProperty('showErrors');
+    },
+    // filesUploaded: function (files) {
+    //   console.log("upload-table.js : uploaded new files : ", files);
+    //   this.clearFields();
+    //
+    //   this.set('files', files);
+    //   var name = files[0].name;
+    //   var i = name.indexOf(".");
+    //   var tableName = name.substr(0, i);
+    //   this.set('tableName', tableName);
+    //   var self = this;
+    //   return this.generatePreview(sourceObject)
+    // },
+    // Triggered when a new preview source is chosen: resets all wizard state,
+    // remembers the source, and regenerates the preview from it.
+    preview: function (previewObject) {
+      console.log("upload-table.js : uploaded new files : ", previewObject);
+      this.clearFields();
+
+      this.set('previewObject', previewObject);
+      // var name = previewObject.get("fileInfo").get("files")[0].name;
+      // var i = name.indexOf(".");
+      // var tableName = name.substr(0, i);
+      // this.set('tableName', tableName);
+      // var self = this;
+      return this.generatePreview(previewObject)
+    },
+    // Generates a preview for a file already on HDFS (no source argument).
+    previewFromHdfs: function () {
+      return this.generatePreview();
+    },
+    // Entry point for the create-and-upload flow. NOTE(review): only
+    // synchronous exceptions are caught here; rejected promises from
+    // createTableAndUploadFile are presumably handled inside it — confirm.
+    uploadTable: function (tableData) {
+      console.log("tableData", tableData);
+      try {
+        this.createTableAndUploadFile(tableData);
+      } catch (e) {
+        console.log("exception occured : ", e);
+        this.setError(e);
+        this.hideUploadModal();
+      }
+    },
+    // Switches the file-source selection to HDFS (disables local upload mode).
+    uploadFromHDFS: function () {
+      this.set("isLocalUpload", false);
+    }
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js b/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js
index 453eb12..8503715 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js
@@ -66,4 +66,7 @@ export default Ember.Service.extend({
     this.get('store').adapterFor('job').fetchResult(jobId);
   },
 
+  // Fetches a single job record by id, forcing a refresh from the server
+  // (reload: true bypasses the store cache). Returns a promise that resolves
+  // to the job model.
+  getJob: function (jobId) {
+    return this.get('store').findRecord('job', jobId, {reload: true})
+  }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/app/services/table-operations.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/services/table-operations.js b/contrib/views/hive20/src/main/resources/ui/app/services/table-operations.js
index 16eba35..d11816c 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/services/table-operations.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/services/table-operations.js
@@ -35,7 +35,7 @@ export default Ember.Service.extend({
       detailedInfo: detailedInfo,
       storageInfo: storageInfo
     });
-    return new Promise((resolve, reject) => {
+    return new Ember.RSVP.Promise((resolve, reject) => {
       this.get('store').adapterFor('table').createTable(tableInfo).then((data) => {
         this.get('store').pushPayload(data);
         resolve(this.get('store').peekRecord('job', data.job.id));
@@ -46,7 +46,7 @@ export default Ember.Service.extend({
   },
 
   deleteTable(database, table) {
-    return new Promise((resolve, reject) => {
+    return new Ember.RSVP.Promise((resolve, reject) => {
       this.get('store').adapterFor('table').deleteTable(database, table).then((data) => {
         this.get('store').pushPayload(data);
         resolve(this.get('store').peekRecord('job', data.job.id));

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/app/templates/components/csv-format-params.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/csv-format-params.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/csv-format-params.hbs
new file mode 100644
index 0000000..a7cb862
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/csv-format-params.hbs
@@ -0,0 +1,118 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="panel panel-info">
+  <div class="panel-heading">
+    <div class="panel-title">
+      <button class="btn btn-primary {{if showCSVFormatInput 'active'}}" {{action
+      "toggleCSVFormat"}}>
+      {{fa-icon (if showCSVFormatInput "minus" "plus")}}
+      </button>
+      &nbsp;&nbsp;&nbsp;Select File Format
+    </div>
+  </div>
+  {{#if showCSVFormatInput}}
+  <div class="panel-body rowformat-custom-row">
+    <div class="row">
+      <div class="col-md-6 form-horizontal">
+        <div class="form-group">
+          <label class="col-md-4 control-label">{{t 'hive.ui.fileSource.fileType'}}</label>
+          <div class="col-md-7">
+            {{#power-select
+            selected=fileFormatInfo.inputFileType
+            options=inputFileTypes
+            searchField="name"
+            searchPlaceholder=(t 'hive.ui.fileSource.selectFileType')
+            onchange=(action "inputFileTypeSelected") as |parameter|}}
+            {{parameter.name}}{{#if parameter.description}} - {{parameter.description}}{{/if}}
+            {{/power-select}}
+          </div>
+          <div class="col-md-1">
+            <a class="text-danger" {{action "clearInputFileType"}}>{{fa-icon "times" size="lg"}}</a>
+          </div>
+        </div>
+      </div>
+    </div>
+    {{#if inputFileTypeCSV}}
+
+    <div class="row">
+      <div class="col-md-6 form-horizontal">
+        <div class="form-group">
+          <label class="col-md-4 control-label">{{t 'hive.ui.csvFormatParams.columnDelimterField'}}</label>
+          <div class="col-md-7">
+            {{#power-select
+            selected=fileFormatInfo.csvParams.csvDelimiter
+            options=terminationChars
+            searchField="name"
+            searchPlaceholder=(t 'hive.ui.csvFormatParams.columnDelimiterTooltip')
+            onchange=(action "csvDelimiterSelected") as |parameter|}}
+            {{parameter.name}}{{#if parameter.description}} - {{parameter.description}}{{/if}}
+            {{/power-select}}
+          </div>
+          <div class="col-md-1">
+            <a class="text-danger" {{action "clearColumnDelimter"}}>{{fa-icon "times" size="lg"}}</a>
+          </div>
+        </div>
+      </div>
+    </div>
+    <div class="row">
+      <div class="col-md-6 form-horizontal">
+        <div class="form-group">
+          <label class="col-md-4 control-label">{{t 'hive.ui.csvFormatParams.escapeCharacterField'}}</label>
+          <div class="col-md-7">
+            {{#power-select
+            selected=fileFormatInfo.csvParams.csvEscape
+            options=terminationChars
+            searchField="name"
+            searchPlaceholder=(t 'hive.ui.csvFormatParams.escapeCharacterTooltip')
+            onchange=(action "csvEscapeSelected") as |parameter|}}
+            {{parameter.name}}{{#if parameter.description}} - {{parameter.description}}{{/if}}
+            {{/power-select}}
+          </div>
+          <div class="col-md-1">
+            <a class="text-danger" {{action "clearEscapeCharacter"}}>{{fa-icon "times" size="lg"}}</a>
+          </div>
+        </div>
+      </div>
+    </div>
+    <div class="row">
+      <div class="col-md-6 form-horizontal">
+        <div class="form-group">
+          <label class="col-md-4 control-label">{{t 'hive.ui.csvFormatParams.quoteCharacterField'}}</label>
+          <div class="col-md-7">
+            {{#power-select
+            selected=fileFormatInfo.csvParams.csvQuote
+            options=terminationChars
+            searchField="name"
+            searchPlaceholder=(t 'hive.ui.csvFormatParams.quoteCharacterTooltip')
+            onchange=(action "csvQuoteSelected") as |parameter|}}
+            {{parameter.name}}{{#if parameter.description}} - {{parameter.description}}{{/if}}
+            {{/power-select}}
+          </div>
+          <div class="col-md-1">
+            <a class="text-danger" {{action "clearCsvQuote"}}>{{fa-icon "times" size="lg"}}</a>
+          </div>
+        </div>
+      </div>
+    </div>
+    {{/if}}
+  </div>
+  {{/if}}
+</div>
+
+{{yield}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/app/templates/components/radio-button.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/radio-button.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/radio-button.hbs
new file mode 100644
index 0000000..6ae472f
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/radio-button.hbs
@@ -0,0 +1,19 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+{{yield}}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/app/templates/components/simple-table.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/simple-table.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/simple-table.hbs
new file mode 100644
index 0000000..1a76cc6
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/simple-table.hbs
@@ -0,0 +1,42 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div>
+  <table class="table table-expandable no-border">
+    <thead>
+    <tr>
+      {{#each header as |column|}}
+      <th>
+        {{column.name}}
+      </th>
+      {{/each}}
+    </tr>
+    </thead>
+    <tbody>
+    {{#each rows as |row|}}
+    <tr>
+      {{#each row.row as |item|}}
+      <td>{{item}}</td>
+      {{/each}}
+    </tr>
+    {{/each}}
+    </tbody>
+  </table>
+</div>
+
+{{yield}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/app/templates/components/upload-table-source.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/upload-table-source.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/upload-table-source.hbs
new file mode 100644
index 0000000..c8e57e7
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/upload-table-source.hbs
@@ -0,0 +1,112 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="panel panel-info">
+  <div class="panel-heading">
+    <div class="panel-title">
+      <button class="btn btn-primary {{if showFileSourceInput 'active'}}" {{action
+      "toggleFileSource"}}>
+      {{fa-icon (if showFileSourceInput "minus" "plus")}}
+      </button>
+      &nbsp;&nbsp;&nbsp;Select File Source
+    </div>
+  </div>
+  {{#if showFileSourceInput}}
+  <div class="panel-body rowformat-custom-row">
+    <div class="row">
+      <div class="col-md-12 form-horizontal">
+        <div class="form-group">
+          <label class="col-md-3 control-label">{{t 'hive.ui.fileSource.uploadFromHdfs'}}</label>
+          <div class="col-md-3">
+            {{radio-button value='hdfs' checked=fileInfo.uploadSource}}
+          </div>
+          <label class="col-md-3 control-label">{{t 'hive.ui.fileSource.uploadFromLocal'}}</label>
+          <div class="col-md-3">
+            {{radio-button value='local' checked=fileInfo.uploadSource}}
+          </div>
+        </div>
+      </div>
+    </div>
+
+    {{#if showHdfsLocationInput}}
+    <div class="panel-body">
+      <div class="row">
+        <div class="col-md-12 form-horizontal">
+          <div class="form-group">
+            <label class="col-md-3 control-label">{{t 'hive.ui.fileSource.enterHdfsPathLabel'}}</label>
+            <div class="col-md-6">
+              {{input type="text" class="form-control" value=fileInfo.hdfsPath}}
+            </div>
+          </div>
+        </div>
+
+        <!--
+        <button class="btn btn-success" {{action
+        "toggleDirectoryViewer"}}>{{t 'hive.ui.fileSource.selectHdfsLocation'}}</button>
+        -->
+      </div>
+      <!--
+      {{#if showDirectoryViewer}}
+      {{hdfs-viewer-modal
+      showSelectedPath=true
+      close="closeHdfsModal"
+      selected="hdfsPath"
+      }}
+      {{/if}}
+      -->
+    </div>
+    {{/if}}
+
+    {{#if showLocalLocationInput}}
+    <div class="panel-body">
+      <div class="row">
+        <div class="col-md-12 form-horizontal">
+          <div class="form-group">
+            <label class="col-md-3 control-label">{{t 'hive.ui.fileSource.selectLocalFileLabel'}}</label>
+            <div class="col-md-6">
+              {{#file-picker fileLoaded="onFileChanged" preview=false}}
+              <div class="text-center vert-align-middle">
+                {{fa-icon "cloud-upload" size="4"}}
+                <h4> Drag file to upload or click to browse</h4>
+              </div>
+              {{/file-picker}}
+            </div>
+          </div>
+        </div>
+
+        <!--
+        <button class="btn btn-success" {{action
+        "toggleDirectoryViewer"}}>{{t 'hive.ui.fileSource.selectHdfsLocation'}}</button>
+        -->
+      </div>
+      <!--
+      {{#if showDirectoryViewer}}
+      {{hdfs-viewer-modal
+      showSelectedPath=true
+      close="closeHdfsModal"
+      selected="hdfsPath"
+      }}
+      {{/if}}
+      -->
+    </div>
+    {{/if}}
+  </div>
+  {{/if}}
+</div>
+
+{{yield}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/app/templates/components/upload-table.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/upload-table.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/upload-table.hbs
new file mode 100644
index 0000000..e4388f0
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/upload-table.hbs
@@ -0,0 +1,59 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="table-body">
+  {{csv-format-params fileFormatInfo=fileFormatInfo}}
+</div>
+
+<div class="table-body">
+  {{upload-table-source onFileChanged='onFileChanged' fileInfo=fileInfo}}
+</div>
+
+
+<div class="create-table-controls">
+  <button class="btn btn-success" {{action
+  "preview"}}>{{fa-icon "eye"}} Preview</button>
+</div>
+
+<div class="table-body">
+  <div class="panel panel-info">
+    <div class="panel-heading">
+      <div class="panel-title">
+        <button class="btn btn-primary {{if showPreview 'active'}}" {{action
+        "toggleShowPreview"}}>
+        {{fa-icon (if showPreview "minus" "plus")}}
+        </button>
+        &nbsp;&nbsp;&nbsp;Table Preview
+      </div>
+    </div>
+    {{#if showPreview}}
+    {{simple-table header=columns rows=rows }}
+    {{/if}}
+  </div>
+</div>
+
+
+<div class="col-md-12 table-info">
+  <div class="table-body">
+    {{create-table tabs=tabs
+    cancel="cancel"
+    create="createAndUpload" columns=columns tableName=tableName }}
+  </div>
+</div>
+
+{{yield}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/app/templates/components/validated-text-field.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/validated-text-field.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/validated-text-field.hbs
new file mode 100644
index 0000000..7cf0fcf
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/validated-text-field.hbs
@@ -0,0 +1,23 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+{{!
+* see example in validated-text-field.js component file
+}}
+
+{{input class=inputClass value=inputValue title=message placeholder=placeholder}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/new.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/new.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/new.hbs
index 39b7a9e..4f3b98a 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/new.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/new.hbs
@@ -32,7 +32,11 @@
     {{/modal-dialog}}
   {{/if}}
   <div class="table-header row">
-    <p class="text-uppercase">table<strong>&nbsp;&nbsp;>&nbsp;&nbsp;create table</strong></p>
+    <p class="text-uppercase">table<strong>&nbsp;&nbsp;>&nbsp;&nbsp;create table</strong>
+    <div class="pull-right">
+    {{#link-to "databases.database.tables.upload-table" }}<p class="text-uppercase">{{fa-icon "upload"}}&nbsp;upload table</p>{{/link-to}}
+    </div>
+  </p>
   </div>
   <div class="table-body">
     {{create-table tabs=tabs

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/upload-table.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/upload-table.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/upload-table.hbs
new file mode 100644
index 0000000..0091ede
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/upload-table.hbs
@@ -0,0 +1,45 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+<div class="col-md-12 table-info">
+
+<div class="table-body">
+  <div class="table-header row">
+    <p class="text-uppercase">table<strong>&nbsp;&nbsp;>&nbsp;&nbsp;upload table</strong></p>
+  </div>
+</div>
+
+<div class="col-md-12 table-info">
+{{#if showUploadTableModal}}
+{{#modal-dialog
+translucentOverlay=true
+container-class="modal-dialog modal-sm"}}
+<div class="modal-content">
+  <div class="modal-header text-danger">
+    <p class="modal-title">{{fa-icon "plus"}}&nbsp;&nbsp;&nbsp; Upload Table</p>
+  </div>
+  <div class="modal-body text-center text-primary">
+    <p>{{uploadTableMessage}}</p>
+  </div>
+</div><!-- /.modal-content -->
+{{/modal-dialog}}
+{{/if}}
+</div>
+
+{{upload-table tabs=tabs columns=columns rows=rows tableName=tableName tableMeta=tableMeta cancel="cancel" createAndUpload="uploadTable" preview="preview"}}
+</div>
+{{yield}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/app/utils/constants.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/utils/constants.js b/contrib/views/hive20/src/main/resources/ui/app/utils/constants.js
new file mode 100644
index 0000000..5405773
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/utils/constants.js
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Object.create({
+  /**
+   * This should reflect the naming conventions across the application.
+   * Changing one value also means changing the filenames for the chain of files
+   * represented by that value (routes, controllers, models etc).
+   * This dependency goes both ways.
+  */
+  namingConventions: {
+    routes: {
+    },
+
+    subroutes: {
+    },
+
+    job: 'job'
+  },
+
+  // Registered Ember service names used for lookup/injection.
+  services: {
+    alertMessages: 'alert-messages',
+    jobs: 'jobs',
+  },
+
+  // Identifies which part of the application submitted a job to the backend.
+  jobReferrer: {
+    sample: 'SAMPLE',
+    explain: 'EXPLAIN',
+    visualExplain: 'VISUALEXPLAIN',
+    job: 'JOB',
+    user: 'USER',
+    internal: 'INTERNAL'
+  },
+
+  // Lifecycle states a job can report.
+  statuses: {
+    unknown: "UNKNOWN",
+    initialized: "INITIALIZED",
+    running: "RUNNING",
+    succeeded: "SUCCEEDED",
+    canceled: "CANCELED",
+    closed: "CLOSED",
+    error: "ERROR",
+    failed: 'FAILED',
+    killed: 'KILLED',
+    pending: "PENDING"
+  },
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/bower.json
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/bower.json b/contrib/views/hive20/src/main/resources/ui/bower.json
index 4eadee7..a4ce788 100644
--- a/contrib/views/hive20/src/main/resources/ui/bower.json
+++ b/contrib/views/hive20/src/main/resources/ui/bower.json
@@ -6,6 +6,7 @@
     "ember-qunit-notifications": "0.1.0",
     "font-awesome": "~4.5.0",
     "codemirror": "~5.15.0",
-    "bootstrap-treeview": "~1.2.0"
+    "bootstrap-treeview": "~1.2.0",
+    "blob": "*"
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/config/environment.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/config/environment.js b/contrib/views/hive20/src/main/resources/ui/config/environment.js
index e45de05..3714eb9 100644
--- a/contrib/views/hive20/src/main/resources/ui/config/environment.js
+++ b/contrib/views/hive20/src/main/resources/ui/config/environment.js
@@ -66,5 +66,9 @@ module.exports = function(environment) {
 
   }
 
+  ENV.i18n = {
+    defaultLocale: 'en'
+  };
+
   return ENV;
 };

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/package.json
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/package.json b/contrib/views/hive20/src/main/resources/ui/package.json
index ed7a299..a066bfd 100644
--- a/contrib/views/hive20/src/main/resources/ui/package.json
+++ b/contrib/views/hive20/src/main/resources/ui/package.json
@@ -30,6 +30,7 @@
     "ember-cli-babel": "^5.1.6",
     "ember-cli-daterangepicker": "0.3.0",
     "ember-cli-dependency-checker": "^1.2.0",
+    "ember-cli-file-picker": "0.0.10",
     "ember-cli-flash": "1.4.0",
     "ember-cli-htmlbars": "^1.0.3",
     "ember-cli-htmlbars-inline-precompile": "^0.3.1",
@@ -46,6 +47,7 @@
     "ember-data": "^2.7.0",
     "ember-export-application-global": "^1.0.5",
     "ember-font-awesome": "2.2.0",
+    "ember-i18n": "4.5.0",
     "ember-light-table": "1.8.0",
     "ember-load-initializers": "^0.5.1",
     "ember-modal-dialog": "0.9.0",
@@ -54,6 +56,7 @@
     "ember-resolver": "^2.0.3",
     "ember-responsive": "2.0.0",
     "ember-sass-bootstrap": "0.1.2",
+    "ember-uploader": "1.2.3",
     "loader.js": "^4.0.1"
   },
   "ember-addon": {

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserCSVTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserCSVTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserCSVTest.java
index 8006e91..5939b03 100644
--- a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserCSVTest.java
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserCSVTest.java
@@ -21,6 +21,7 @@ package org.apache.ambari.view.hive20.resources.upload;
 import org.apache.ambari.view.hive20.client.ColumnDescription;
 import org.apache.ambari.view.hive20.client.ColumnDescriptionShort;
 import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;
 import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl;
 import org.apache.ambari.view.hive20.resources.uploads.parsers.DataParser;
 import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseOptions;
@@ -54,8 +55,8 @@ public class DataParserCSVTest {
       Assert.assertNotNull(pd.getHeader());
       Assert.assertEquals(2, pd.getPreviewRows().size()); // now it will not return the first row which is header
       Assert.assertEquals(2, pd.getHeader().size());
-      ColumnDescription[] cd = {new ColumnDescriptionImpl("1", ColumnDescriptionShort.DataTypes.INT.toString(), 0),
-              new ColumnDescriptionImpl("a", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1)};
+      ColumnInfo[] cd = {new ColumnInfo("1", ColumnDescriptionShort.DataTypes.INT.toString()),
+              new ColumnInfo("a", ColumnDescriptionShort.DataTypes.CHAR.toString())};
 
       Object cols2[] = new Object[2];
       cols2[0] = "2";
@@ -102,15 +103,15 @@ public class DataParserCSVTest {
       PreviewData pd = dp.parsePreview();
       Assert.assertNotNull(pd.getHeader());
       Assert.assertEquals(4, pd.getHeader().size());
-      ColumnDescription[] cd = {
+      ColumnInfo[] cd = {
         // as row 3 contains 2.2
-        new ColumnDescriptionImpl("1", ColumnDescriptionShort.DataTypes.DOUBLE.toString(), 0),
+        new ColumnInfo("1", ColumnDescriptionShort.DataTypes.DOUBLE.toString()),
         // as all are chars
-        new ColumnDescriptionImpl("a", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1),
+        new ColumnInfo("a", ColumnDescriptionShort.DataTypes.CHAR.toString()),
         // as row 4 contains abc
-        new ColumnDescriptionImpl("10", ColumnDescriptionShort.DataTypes.STRING.toString(), 2),
+        new ColumnInfo("10", ColumnDescriptionShort.DataTypes.STRING.toString()),
         // although row 1 contains k but it is in header and not counted in detecting datatype
-        new ColumnDescriptionImpl("k", ColumnDescriptionShort.DataTypes.INT.toString(), 3)};
+        new ColumnInfo("k", ColumnDescriptionShort.DataTypes.INT.toString())};
 
       Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
     }
@@ -146,16 +147,16 @@ public class DataParserCSVTest {
       PreviewData pd = dp.parsePreview();
       Assert.assertNotNull(pd.getHeader());
       Assert.assertEquals(4, pd.getHeader().size());
-      ColumnDescription[] cd = {
+      ColumnInfo[] cd = {
         // as row 3 contains 2.2
-        new ColumnDescriptionImpl("1", ColumnDescriptionShort.DataTypes.DOUBLE.toString(), 0),
+        new ColumnInfo("1", ColumnDescriptionShort.DataTypes.DOUBLE.toString()),
         // as all are chars
-        new ColumnDescriptionImpl("a", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1),
+        new ColumnInfo("a", ColumnDescriptionShort.DataTypes.CHAR.toString()),
         // some are int, char and some double .. nothing other than 'string' satisfies all the rows
-        new ColumnDescriptionImpl("10", ColumnDescriptionShort.DataTypes.STRING.toString(), 2),
+        new ColumnInfo("10", ColumnDescriptionShort.DataTypes.STRING.toString()),
         // although row 1 contains k but it is in header and not counted in detecting datatype
         // but row 2 also has a char p which will be acconted for datatype detection
-        new ColumnDescriptionImpl("k", ColumnDescriptionShort.DataTypes.CHAR.toString(), 3)};
+        new ColumnInfo("k", ColumnDescriptionShort.DataTypes.CHAR.toString())};
 
       Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
     }
@@ -184,8 +185,8 @@ public class DataParserCSVTest {
       Assert.assertNotNull(pd.getHeader());
       Assert.assertEquals(1, pd.getPreviewRows().size());
       Assert.assertEquals(2, pd.getHeader().size());
-      ColumnDescription[] cd = {new ColumnDescriptionImpl("column1", ColumnDescriptionShort.DataTypes.INT.toString(), 0),
-        new ColumnDescriptionImpl("column2", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1)};
+      ColumnInfo[] cd = {new ColumnInfo("column1", ColumnDescriptionShort.DataTypes.INT.toString()),
+        new ColumnInfo("column2", ColumnDescriptionShort.DataTypes.CHAR.toString())};
 
       Object cols1[] = new Object[2];
       cols1[0] = "1";

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserJSONTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserJSONTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserJSONTest.java
index a15e5d4..2c7e5e8 100644
--- a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserJSONTest.java
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserJSONTest.java
@@ -21,6 +21,7 @@ package org.apache.ambari.view.hive20.resources.upload;
 import org.apache.ambari.view.hive20.client.ColumnDescription;
 import org.apache.ambari.view.hive20.client.ColumnDescriptionShort;
 import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;
 import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl;
 import org.apache.ambari.view.hive20.resources.uploads.parsers.DataParser;
 import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseOptions;
@@ -71,20 +72,20 @@ public class DataParserJSONTest {
       Assert.assertNotNull(pd.getHeader());
       Assert.assertEquals(7, pd.getPreviewRows().size()); // header row + preview rows
       Assert.assertEquals(14, pd.getHeader().size());
-      ColumnDescription[] cd = {new ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.CHAR.toString(), 0),
-              new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1),
-              new ColumnDescriptionImpl("col3", ColumnDescriptionShort.DataTypes.STRING.toString(), 2),
-              new ColumnDescriptionImpl("col4", ColumnDescriptionShort.DataTypes.STRING.toString(), 3),
-              new ColumnDescriptionImpl("col5", ColumnDescriptionShort.DataTypes.STRING.toString(), 4),
-              new ColumnDescriptionImpl("col6", ColumnDescriptionShort.DataTypes.STRING.toString(), 5),
-              new ColumnDescriptionImpl("col7", ColumnDescriptionShort.DataTypes.STRING.toString(), 6),
-              new ColumnDescriptionImpl("col8", ColumnDescriptionShort.DataTypes.STRING.toString(), 7),
-              new ColumnDescriptionImpl("col9", ColumnDescriptionShort.DataTypes.STRING.toString(), 8),
-              new ColumnDescriptionImpl("col10", ColumnDescriptionShort.DataTypes.STRING.toString(), 9),
-              new ColumnDescriptionImpl("col11", ColumnDescriptionShort.DataTypes.STRING.toString(), 10),
-              new ColumnDescriptionImpl("col12", ColumnDescriptionShort.DataTypes.STRING.toString(), 11),
-              new ColumnDescriptionImpl("col13", ColumnDescriptionShort.DataTypes.STRING.toString(), 12),
-              new ColumnDescriptionImpl("col14", ColumnDescriptionShort.DataTypes.DOUBLE.toString(), 13)};
+      ColumnInfo[] cd = {new ColumnInfo("col1", ColumnDescriptionShort.DataTypes.CHAR.toString()),
+              new ColumnInfo("col2", ColumnDescriptionShort.DataTypes.STRING.toString()),
+              new ColumnInfo("col3", ColumnDescriptionShort.DataTypes.STRING.toString()),
+              new ColumnInfo("col4", ColumnDescriptionShort.DataTypes.STRING.toString()),
+              new ColumnInfo("col5", ColumnDescriptionShort.DataTypes.STRING.toString()),
+              new ColumnInfo("col6", ColumnDescriptionShort.DataTypes.STRING.toString()),
+              new ColumnInfo("col7", ColumnDescriptionShort.DataTypes.STRING.toString()),
+              new ColumnInfo("col8", ColumnDescriptionShort.DataTypes.STRING.toString()),
+              new ColumnInfo("col9", ColumnDescriptionShort.DataTypes.STRING.toString()),
+              new ColumnInfo("col10", ColumnDescriptionShort.DataTypes.STRING.toString()),
+              new ColumnInfo("col11", ColumnDescriptionShort.DataTypes.STRING.toString()),
+              new ColumnInfo("col12", ColumnDescriptionShort.DataTypes.STRING.toString()),
+              new ColumnInfo("col13", ColumnDescriptionShort.DataTypes.STRING.toString()),
+              new ColumnInfo("col14", ColumnDescriptionShort.DataTypes.DOUBLE.toString())};
 
       Row row2 = new Row(new Object[]{"a", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "4.4"});
       Row row3 = new Row(new Object[]{"b", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "5.4"});
@@ -207,8 +208,8 @@ public class DataParserJSONTest {
       Assert.assertNotNull(pd.getHeader());
       Assert.assertEquals(1, pd.getPreviewRows().size());
       Assert.assertEquals(2, pd.getHeader().size());
-      ColumnDescription[] cd = {new ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.CHAR.toString(), 0),
-        new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1)};
+      ColumnInfo[] cd = {new ColumnInfo("col1", ColumnDescriptionShort.DataTypes.CHAR.toString()),
+        new ColumnInfo("col2", ColumnDescriptionShort.DataTypes.STRING.toString())};
 
       Object cols1[] = new Object[2];
       cols1[0] = "d";
@@ -246,8 +247,8 @@ public class DataParserJSONTest {
       Assert.assertNotNull(pd.getHeader());
       Assert.assertEquals(1, pd.getPreviewRows().size());
       Assert.assertEquals(2, pd.getHeader().size());
-      ColumnDescription[] cd = {new ColumnDescriptionImpl("column1", ColumnDescriptionShort.DataTypes.CHAR.toString(), 0),
-        new ColumnDescriptionImpl("column2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1)};
+      ColumnInfo[] cd = {new ColumnInfo("column1", ColumnDescriptionShort.DataTypes.CHAR.toString()),
+        new ColumnInfo("column2", ColumnDescriptionShort.DataTypes.STRING.toString())};
 
       Object cols1[] = new Object[2];
       cols1[0] = "d";

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserXMLTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserXMLTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserXMLTest.java
index 07e7c7c..bcdcfc0 100644
--- a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserXMLTest.java
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserXMLTest.java
@@ -21,7 +21,7 @@ package org.apache.ambari.view.hive20.resources.upload;
 import org.apache.ambari.view.hive20.client.ColumnDescription;
 import org.apache.ambari.view.hive20.client.ColumnDescriptionShort;
 import org.apache.ambari.view.hive20.client.Row;
-import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;
 import org.apache.ambari.view.hive20.resources.uploads.parsers.DataParser;
 import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseOptions;
 import org.apache.ambari.view.hive20.resources.uploads.parsers.PreviewData;
@@ -68,11 +68,11 @@ public class DataParserXMLTest {
       Assert.assertNotNull(pd.getHeader());
       Assert.assertEquals(2, pd.getPreviewRows().size()); // header row + preview rows
       Assert.assertEquals(5, pd.getHeader().size());
-      ColumnDescription[] cd = {new ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.STRING.toString(), 0),
-              new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1),
-              new ColumnDescriptionImpl("col3", ColumnDescriptionShort.DataTypes.STRING.toString(), 2),
-              new ColumnDescriptionImpl("col4", ColumnDescriptionShort.DataTypes.INT.toString(), 3),
-              new ColumnDescriptionImpl("col5", ColumnDescriptionShort.DataTypes.INT.toString(), 4)
+      ColumnInfo[] cd = {new ColumnInfo("col1", ColumnDescriptionShort.DataTypes.STRING.toString()),
+              new ColumnInfo("col2", ColumnDescriptionShort.DataTypes.STRING.toString()),
+              new ColumnInfo("col3", ColumnDescriptionShort.DataTypes.STRING.toString()),
+              new ColumnInfo("col4", ColumnDescriptionShort.DataTypes.INT.toString()),
+              new ColumnInfo("col5", ColumnDescriptionShort.DataTypes.INT.toString())
       };
 
       Row row2 = new Row(new Object[]{"row1-col1-Value", "row1-col2-Value", "row1-col3-Value", "10", "11"});
@@ -236,8 +236,8 @@ public class DataParserXMLTest {
       Assert.assertNotNull(pd.getHeader());
       Assert.assertEquals(1, pd.getPreviewRows().size());
       Assert.assertEquals(2, pd.getHeader().size());
-      ColumnDescription[] cd = {new ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.STRING.toString(), 0),
-        new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.INT.toString(), 1)};
+      ColumnInfo[] cd = {new ColumnInfo("col1", ColumnDescriptionShort.DataTypes.STRING.toString()),
+        new ColumnInfo("col2", ColumnDescriptionShort.DataTypes.INT.toString())};
 
       Object cols1[] = new Object[2];
       cols1[0] = "row1-col1-Value";
@@ -278,8 +278,8 @@ public class DataParserXMLTest {
       Assert.assertNotNull(pd.getHeader());
       Assert.assertEquals(1, pd.getPreviewRows().size());
       Assert.assertEquals(2, pd.getHeader().size());
-      ColumnDescription[] cd = {new ColumnDescriptionImpl("column1", ColumnDescriptionShort.DataTypes.STRING.toString(), 0),
-        new ColumnDescriptionImpl("column2", ColumnDescriptionShort.DataTypes.INT.toString(), 1)};
+      ColumnInfo[] cd = {new ColumnInfo("column1", ColumnDescriptionShort.DataTypes.STRING.toString()),
+        new ColumnInfo("column2", ColumnDescriptionShort.DataTypes.INT.toString())};
 
       Object cols1[] = new Object[2];
       cols1[0] = "row1-col1-Value";

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/QueryGeneratorTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/QueryGeneratorTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/QueryGeneratorTest.java
deleted file mode 100644
index 1a0d34a..0000000
--- a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/QueryGeneratorTest.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive20.resources.upload;
-
-import org.apache.ambari.view.hive.resources.uploads.query.RowFormat;
-import org.apache.ambari.view.hive20.client.ColumnDescription;
-import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl;
-import org.apache.ambari.view.hive20.resources.uploads.HiveFileType;
-import org.apache.ambari.view.hive20.resources.uploads.query.DeleteQueryInput;
-import org.apache.ambari.view.hive20.resources.uploads.query.InsertFromQueryInput;
-import org.apache.ambari.view.hive20.resources.uploads.query.QueryGenerator;
-import org.apache.ambari.view.hive20.resources.uploads.query.TableInfo;
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.util.ArrayList;
-import java.util.List;
-
-public class QueryGeneratorTest {
-  @Test
-  public void testCreateTextFile() {
-
-    List<ColumnDescriptionImpl> cdl = new ArrayList<>(4);
-    cdl.add(new ColumnDescriptionImpl("col1", ColumnDescription.DataTypes.CHAR.toString(), 0, 10));
-    cdl.add(new ColumnDescriptionImpl("col2", ColumnDescription.DataTypes.STRING.toString(), 1));
-    cdl.add(new ColumnDescriptionImpl("col3", ColumnDescription.DataTypes.DECIMAL.toString(), 2, 10, 5));
-    cdl.add(new ColumnDescriptionImpl("col4", ColumnDescription.DataTypes.VARCHAR.toString(), 3, 40));
-    cdl.add(new ColumnDescriptionImpl("col5", ColumnDescription.DataTypes.INT.toString(), 4));
-
-    TableInfo ti = new TableInfo("databaseName", "tableName", cdl, HiveFileType.TEXTFILE, new RowFormat(',', '\\'));
-
-    QueryGenerator qg = new QueryGenerator();
-    Assert.assertEquals("Create query for text file not correct ","CREATE TABLE tableName (col1 CHAR(10), col2 STRING," +
-      " col3 DECIMAL(10,5), col4 VARCHAR(40), col5 INT) ROW FORMAT DELIMITED FIELDS TERMINATED BY ','" +
-      " ESCAPED BY '\\\\' STORED AS TEXTFILE;",qg.generateCreateQuery(ti));
-  }
-
-  @Test
-  public void testCreateORC() {
-
-    List<ColumnDescriptionImpl> cdl = new ArrayList<>(4);
-    cdl.add(new ColumnDescriptionImpl("col1", ColumnDescription.DataTypes.CHAR.toString(), 0, 10));
-    cdl.add(new ColumnDescriptionImpl("col2", ColumnDescription.DataTypes.STRING.toString(), 1));
-    cdl.add(new ColumnDescriptionImpl("col3", ColumnDescription.DataTypes.DECIMAL.toString(), 2, 10, 5));
-    cdl.add(new ColumnDescriptionImpl("col4", ColumnDescription.DataTypes.VARCHAR.toString(), 3, 40));
-    cdl.add(new ColumnDescriptionImpl("col5", ColumnDescription.DataTypes.INT.toString(), 4));
-
-    TableInfo ti = new TableInfo("databaseName", "tableName", cdl, HiveFileType.ORC, new RowFormat(',', '\\'));
-
-    QueryGenerator qg = new QueryGenerator();
-    Assert.assertEquals("Create query for text file not correct ","CREATE TABLE tableName (col1 CHAR(10), col2 STRING, col3 DECIMAL(10,5), col4 VARCHAR(40), col5 INT) STORED AS ORC;",qg.generateCreateQuery(ti));
-  }
-
-  @Test
-  public void testInsertWithoutUnhexFromQuery() {
-    List<ColumnDescriptionImpl> cdl = new ArrayList<>(4);
-    cdl.add(new ColumnDescriptionImpl("col1", ColumnDescription.DataTypes.CHAR.toString(), 0, 10));
-    cdl.add(new ColumnDescriptionImpl("col2", ColumnDescription.DataTypes.STRING.toString(), 1));
-    cdl.add(new ColumnDescriptionImpl("col3", ColumnDescription.DataTypes.DECIMAL.toString(), 2, 10, 5));
-    cdl.add(new ColumnDescriptionImpl("col4", ColumnDescription.DataTypes.VARCHAR.toString(), 3, 40));
-    cdl.add(new ColumnDescriptionImpl("col5", ColumnDescription.DataTypes.INT.toString(), 4));
-
-    InsertFromQueryInput ifqi = new InsertFromQueryInput("fromDB","fromTable","toDB","toTable", cdl, Boolean.FALSE);
-
-    QueryGenerator qg = new QueryGenerator();
-    Assert.assertEquals("insert from one table to another not correct ","INSERT INTO TABLE toDB.toTable SELECT col1, col2, col3, col4, col5 FROM fromDB.fromTable;",qg.generateInsertFromQuery(ifqi));
-  }
-
-  @Test
-  public void testInsertWithUnhexFromQuery() {
-    List<ColumnDescriptionImpl> cdl = new ArrayList<>(4);
-    cdl.add(new ColumnDescriptionImpl("col1", ColumnDescription.DataTypes.CHAR.toString(), 0, 10));
-    cdl.add(new ColumnDescriptionImpl("col2", ColumnDescription.DataTypes.STRING.toString(), 1));
-    cdl.add(new ColumnDescriptionImpl("col3", ColumnDescription.DataTypes.DECIMAL.toString(), 2, 10, 5));
-    cdl.add(new ColumnDescriptionImpl("col4", ColumnDescription.DataTypes.VARCHAR.toString(), 3, 40));
-    cdl.add(new ColumnDescriptionImpl("col5", ColumnDescription.DataTypes.INT.toString(), 4));
-
-    InsertFromQueryInput ifqi = new InsertFromQueryInput("fromDB","fromTable","toDB","toTable", cdl, Boolean.TRUE);
-
-    QueryGenerator qg = new QueryGenerator();
-    Assert.assertEquals("insert from one table to another not correct ","INSERT INTO TABLE toDB.toTable SELECT UNHEX(col1), UNHEX(col2), col3, UNHEX(col4), col5 FROM fromDB.fromTable;",qg.generateInsertFromQuery(ifqi));
-  }
-
-  @Test
-  public void testDropTableQuery() {
-
-    DeleteQueryInput deleteQueryInput = new DeleteQueryInput("dbName","tableName");
-
-    QueryGenerator qg = new QueryGenerator();
-    Assert.assertEquals("drop table query not correct ","DROP TABLE dbName.tableName;",qg.generateDropTableQuery(deleteQueryInput ));
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/TableDataReaderTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/TableDataReaderTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/TableDataReaderTest.java
index 340a921..325aed8 100644
--- a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/TableDataReaderTest.java
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/TableDataReaderTest.java
@@ -20,6 +20,7 @@ package org.apache.ambari.view.hive20.resources.upload;
 
 import org.apache.ambari.view.hive20.client.ColumnDescription;
 import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;
 import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl;
 import org.apache.ambari.view.hive20.resources.uploads.TableDataReader;
 import org.junit.Assert;
@@ -78,9 +79,9 @@ public class TableDataReaderTest {
   @Test
   public void testCSVReader() throws IOException {
     RowIter rowIter = new RowIter(10,10);
-    List<ColumnDescriptionImpl> colDescs = new LinkedList<>();
+    List<ColumnInfo> colDescs = new LinkedList<>();
     for(int i = 0 ; i < 10 ; i++ ) {
-      ColumnDescriptionImpl cd = new ColumnDescriptionImpl("col" + (i+1) , ColumnDescription.DataTypes.STRING.toString(), i);
+      ColumnInfo cd = new ColumnInfo("col" + (i+1) , ColumnDescription.DataTypes.STRING.toString());
       colDescs.add(cd);
     }
 


[2/2] ambari git commit: AMBARI-19872 : HiveView2.0 : added Upload CSV, JSON, XML to create table feature in the new view (nitirajrathore)

Posted by ni...@apache.org.
AMBARI-19872 : HiveView2.0 : added Upload CSV, JSON, XML to create table feature in the new view (nitirajrathore)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/536192bb
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/536192bb
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/536192bb

Branch: refs/heads/trunk
Commit: 536192bb601a426c473845dc10776946f456f220
Parents: 239d9c1
Author: Nitiraj Singh Rathore <ni...@gmail.com>
Authored: Tue Feb 7 13:02:34 2017 +0530
Committer: Nitiraj Singh Rathore <ni...@gmail.com>
Committed: Tue Feb 7 13:03:06 2017 +0530

----------------------------------------------------------------------
 .../view/hive20/internal/dto/ColumnInfo.java    |   4 +
 .../generators/InsertFromQueryGenerator.java    |  77 ++
 .../view/hive20/resources/jobs/JobService.java  |   1 +
 .../hive20/resources/uploads/CSVParams.java     |   2 +-
 .../resources/uploads/TableDataReader.java      |   5 +-
 .../hive20/resources/uploads/TableInput.java    |  51 -
 .../resources/uploads/UploadFromHdfsInput.java  |   8 +-
 .../hive20/resources/uploads/UploadService.java | 107 +--
 .../resources/uploads/parsers/Parser.java       |  11 +-
 .../resources/uploads/parsers/PreviewData.java  |   9 +-
 .../uploads/query/InsertFromQueryInput.java     |  10 +-
 .../resources/uploads/query/QueryGenerator.java | 143 ---
 .../resources/uploads/query/RowFormat.java      |  57 --
 .../resources/uploads/query/TableInfo.java      |  97 --
 .../resources/ui/app/adapters/file-uploader.js  |  28 +
 .../resources/ui/app/adapters/upload-table.js   |  93 ++
 .../ui/app/components/csv-format-params.js      |  77 ++
 .../resources/ui/app/components/radio-button.js |  40 +
 .../resources/ui/app/components/simple-table.js |  22 +
 .../ui/app/components/upload-table-source.js    |  48 +
 .../resources/ui/app/components/upload-table.js |  60 ++
 .../ui/app/components/validated-text-field.js   |  62 ++
 .../main/resources/ui/app/configs/helpers.js    |  14 +-
 .../resources/ui/app/locales/en/translations.js | 111 +++
 .../src/main/resources/ui/app/models/column.js  |  23 +-
 .../hive20/src/main/resources/ui/app/router.js  |   2 +-
 .../app/routes/databases/database/tables/new.js |  80 +-
 .../databases/database/tables/upload-table.js   | 925 +++++++++++++++++++
 .../src/main/resources/ui/app/services/jobs.js  |   3 +
 .../ui/app/services/table-operations.js         |   4 +-
 .../templates/components/csv-format-params.hbs  | 118 +++
 .../app/templates/components/radio-button.hbs   |  19 +
 .../app/templates/components/simple-table.hbs   |  42 +
 .../components/upload-table-source.hbs          | 112 +++
 .../app/templates/components/upload-table.hbs   |  59 ++
 .../components/validated-text-field.hbs         |  23 +
 .../templates/databases/database/tables/new.hbs |   6 +-
 .../databases/database/tables/upload-table.hbs  |  45 +
 .../main/resources/ui/app/utils/constants.js    |  64 ++
 .../hive20/src/main/resources/ui/bower.json     |   3 +-
 .../src/main/resources/ui/config/environment.js |   4 +
 .../hive20/src/main/resources/ui/package.json   |   3 +
 .../resources/upload/DataParserCSVTest.java     |  29 +-
 .../resources/upload/DataParserJSONTest.java    |  37 +-
 .../resources/upload/DataParserXMLTest.java     |  20 +-
 .../resources/upload/QueryGeneratorTest.java    | 108 ---
 .../resources/upload/TableDataReaderTest.java   |   5 +-
 47 files changed, 2249 insertions(+), 622 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/ColumnInfo.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/ColumnInfo.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/ColumnInfo.java
index 44c82a0..5daab91 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/ColumnInfo.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/ColumnInfo.java
@@ -30,6 +30,10 @@ public class ColumnInfo {
   private Integer scale;
   private String comment;
 
+  private ColumnInfo(){
+    // for json de-serialization
+  }
+
   public ColumnInfo(String name, String type, Integer precision, Integer scale, String comment) {
     this.name = name;
     this.type = type;

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/InsertFromQueryGenerator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/InsertFromQueryGenerator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/InsertFromQueryGenerator.java
new file mode 100644
index 0000000..42cec0a
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/InsertFromQueryGenerator.java
@@ -0,0 +1,77 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.ambari.view.hive20.internal.query.generators;
+
+import com.google.common.base.Optional;
+import org.apache.ambari.view.hive20.client.ColumnDescription;
+import org.apache.ambari.view.hive20.exceptions.ServiceException;
+import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;
+import org.apache.ambari.view.hive20.resources.uploads.query.InsertFromQueryInput;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class InsertFromQueryGenerator implements QueryGenerator{
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(InsertFromQueryGenerator.class);
+
+  private InsertFromQueryInput insertFromQueryInput;
+
+  public InsertFromQueryGenerator(InsertFromQueryInput insertFromQueryInput) {
+    this.insertFromQueryInput = insertFromQueryInput;
+  }
+
+  @Override
+  public Optional<String> getQuery() throws ServiceException {
+    StringBuilder insertQuery = new StringBuilder("INSERT INTO TABLE `").append(insertFromQueryInput.getToDatabase()).append(".")
+        .append(insertFromQueryInput.getToTable()).append("`")
+        .append(" SELECT ");
+
+    boolean first = true;
+    for(ColumnInfo column : insertFromQueryInput.getHeader()){
+      String type = column.getType();
+      boolean unhex = insertFromQueryInput.getUnhexInsert() && (
+          ColumnDescription.DataTypes.STRING.toString().equals(type)
+              || ColumnDescription.DataTypes.VARCHAR.toString().equals(type)
+              || ColumnDescription.DataTypes.CHAR.toString().equals(type)
+      );
+
+      if(!first){
+        insertQuery.append(", ");
+      }
+
+      if(unhex) {
+        insertQuery.append("UNHEX(");
+      }
+
+      insertQuery.append(column.getName());
+
+      if(unhex) {
+        insertQuery.append(")");
+      }
+
+      first = false;
+    }
+
+    insertQuery.append(" FROM ").append("`").append(insertFromQueryInput.getFromDatabase()).append(".")
+        .append(insertFromQueryInput.getFromTable()).append("` ").append(";");
+    String query = insertQuery.toString();
+    LOG.info("Insert From Query : {}", query);
+    return Optional.of(query);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/JobService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/JobService.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/JobService.java
index 71cedd1..c5479e7 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/JobService.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/JobService.java
@@ -154,6 +154,7 @@ public class JobService extends BaseService {
     } catch (ItemNotFound itemNotFound) {
       throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
     } catch (Exception ex) {
+      LOG.error("exception while fetching status of job with id : {}", jobId, ex);
       throw new ServiceFormattedException(ex.getMessage(), ex);
     }
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/CSVParams.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/CSVParams.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/CSVParams.java
index 355ed6a..03ed895 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/CSVParams.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/CSVParams.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.ambari.view.hive.resources.uploads;
+package org.apache.ambari.view.hive20.resources.uploads;
 
 import java.io.Serializable;
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/TableDataReader.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/TableDataReader.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/TableDataReader.java
index ee148b8..2855760 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/TableDataReader.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/TableDataReader.java
@@ -21,6 +21,7 @@ package org.apache.ambari.view.hive20.resources.uploads;
 import com.opencsv.CSVWriter;
 import org.apache.ambari.view.hive20.client.ColumnDescription;
 import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;
 import org.apache.commons.codec.binary.Hex;
 
 import java.io.IOException;
@@ -38,14 +39,14 @@ import java.util.List;
 public class TableDataReader extends Reader {
 
   private static final int CAPACITY = 1024;
-  private final List<ColumnDescriptionImpl> header;
+  private final List<ColumnInfo> header;
   private StringReader stringReader = new StringReader("");
 
   private Iterator<Row> iterator;
   private boolean encode = false;
   public static final char CSV_DELIMITER = '\001';
 
-  public TableDataReader(Iterator<Row> rowIterator, List<ColumnDescriptionImpl> header, boolean encode) {
+  public TableDataReader(Iterator<Row> rowIterator, List<ColumnInfo> header, boolean encode) {
     this.iterator = rowIterator;
     this.encode = encode;
     this.header = header;

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/TableInput.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/TableInput.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/TableInput.java
deleted file mode 100644
index 4f15c57..0000000
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/TableInput.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive20.resources.uploads;
-
-import org.apache.ambari.view.hive20.resources.uploads.query.TableInfo;
-
-/**
- * used as input in REST call
- */
-class TableInput extends TableInfo {
-  public Boolean isFirstRowHeader = Boolean.FALSE;
-
-  public TableInput() {
-  }
-
-  public Boolean getIsFirstRowHeader() {
-    return isFirstRowHeader;
-  }
-
-  public void setIsFirstRowHeader(Boolean isFirstRowHeader) {
-    this.isFirstRowHeader = isFirstRowHeader;
-  }
-
-  public void validate(){
-    if( null == this.getHiveFileType()){
-      throw new IllegalArgumentException("fileType parameter cannot be null.");
-    }
-    if( null == this.getTableName()){
-      throw new IllegalArgumentException("tableName parameter cannot be null.");
-    }
-    if( null == this.getDatabaseName()){
-      throw new IllegalArgumentException("databaseName parameter cannot be null.");
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/UploadFromHdfsInput.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/UploadFromHdfsInput.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/UploadFromHdfsInput.java
index c755c02..60955a6 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/UploadFromHdfsInput.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/UploadFromHdfsInput.java
@@ -18,6 +18,8 @@
 
 package org.apache.ambari.view.hive20.resources.uploads;
 
+import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;
+
 import java.io.Serializable;
 import java.util.List;
 
@@ -27,7 +29,7 @@ public class UploadFromHdfsInput implements Serializable{
   private String hdfsPath;
   private String tableName;
   private String databaseName;
-  private List<ColumnDescriptionImpl> header;
+  private List<ColumnInfo> header;
   private boolean containsEndlines;
 
   private String csvDelimiter;
@@ -41,11 +43,11 @@ public class UploadFromHdfsInput implements Serializable{
     return csvDelimiter;
   }
 
-  public List<ColumnDescriptionImpl> getHeader() {
+  public List<ColumnInfo> getHeader() {
     return header;
   }
 
-  public void setHeader(List<ColumnDescriptionImpl> header) {
+  public void setHeader(List<ColumnInfo> header) {
     this.header = header;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/UploadService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/UploadService.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/UploadService.java
index 835626d..3164da0 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/UploadService.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/UploadService.java
@@ -18,16 +18,22 @@
 
 package org.apache.ambari.view.hive20.resources.uploads;
 
+import com.google.common.base.Optional;
 import com.sun.jersey.core.header.FormDataContentDisposition;
 import com.sun.jersey.multipart.FormDataParam;
 import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.hive.resources.uploads.CSVParams;
 import org.apache.ambari.view.hive20.BaseService;
 import org.apache.ambari.view.hive20.ConnectionFactory;
 import org.apache.ambari.view.hive20.ConnectionSystem;
 import org.apache.ambari.view.hive20.client.DDLDelegator;
 import org.apache.ambari.view.hive20.client.DDLDelegatorImpl;
 import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.exceptions.ServiceException;
+import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;
+import org.apache.ambari.view.hive20.internal.dto.TableMeta;
+import org.apache.ambari.view.hive20.internal.query.generators.CreateTableQueryGenerator;
+import org.apache.ambari.view.hive20.internal.query.generators.DeleteTableQueryGenerator;
+import org.apache.ambari.view.hive20.internal.query.generators.InsertFromQueryGenerator;
 import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
 import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobController;
 import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobImpl;
@@ -37,8 +43,6 @@ import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseOptions;
 import org.apache.ambari.view.hive20.resources.uploads.parsers.PreviewData;
 import org.apache.ambari.view.hive20.resources.uploads.query.DeleteQueryInput;
 import org.apache.ambari.view.hive20.resources.uploads.query.InsertFromQueryInput;
-import org.apache.ambari.view.hive20.resources.uploads.query.QueryGenerator;
-import org.apache.ambari.view.hive20.resources.uploads.query.TableInfo;
 import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
 import org.apache.ambari.view.hive20.utils.SharedObjectsFactory;
 import org.apache.ambari.view.utils.ambari.AmbariApi;
@@ -52,11 +56,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import javax.inject.Inject;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.*;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 import java.io.File;
@@ -163,7 +163,7 @@ public class UploadService extends BaseService {
     }
   }
 
-  @POST
+  @PUT
   @Path("/preview")
   @Consumes(MediaType.MULTIPART_FORM_DATA)
   public Response uploadForPreview(
@@ -225,29 +225,6 @@ public class UploadService extends BaseService {
   }
 
 
-  @Path("/createTable")
-  @POST
-  @Consumes(MediaType.APPLICATION_JSON)
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response createTable(TableInput tableInput) {
-    try {
-      tableInput.validate();
-      String databaseName = tableInput.getDatabaseName();
-      String tableCreationQuery = generateCreateQuery(tableInput);
-      LOG.info("tableCreationQuery : {}", tableCreationQuery);
-
-      Job job = createJob(tableCreationQuery, databaseName);
-      LOG.info("job created for table creation {}", job);
-      return Response.ok(job).build();
-    } catch (WebApplicationException e) {
-      LOG.error(getErrorMessage(e), e);
-      throw e;
-    } catch (Throwable e) {
-      LOG.error(e.getMessage(), e);
-      throw new ServiceFormattedException(e);
-    }
-  }
-
   @Path("/uploadFromHDFS")
   @POST
   @Consumes(MediaType.APPLICATION_JSON)
@@ -281,7 +258,7 @@ public class UploadService extends BaseService {
   }
 
   @Path("/upload")
-  @POST
+  @PUT
   @Consumes(MediaType.MULTIPART_FORM_DATA)
   @Produces(MediaType.APPLICATION_JSON)
   public Response uploadFile(
@@ -301,7 +278,7 @@ public class UploadService extends BaseService {
     try {
       CSVParams csvParams = getCsvParams(csvDelimiter, csvQuote, csvEscape);
       ObjectMapper mapper = new ObjectMapper();
-      List<ColumnDescriptionImpl> columnList = mapper.readValue(header, new TypeReference<List<ColumnDescriptionImpl>>(){});
+      List<ColumnInfo> columnList = mapper.readValue(header, new TypeReference<List<ColumnInfo>>(){});
       String path = uploadFileFromStream(uploadedInputStream, isFirstRowHeader, inputFileType, tableName, databaseName, columnList, containsEndlines, csvParams);
 
       JSONObject jo = new JSONObject();
@@ -325,7 +302,9 @@ public class UploadService extends BaseService {
       String insertQuery = generateInsertFromQuery(input);
       LOG.info("insertQuery : {}", insertQuery);
 
-      Job job = createJob(insertQuery, "default");
+      Job job = createJob(insertQuery, input.getFromDatabase(), "Insert from " +
+              input.getFromDatabase() + "." + input.getFromTable() + " to " +
+              input.getToDatabase() + "." + input.getToTable());
       LOG.info("Job created for insert from temp table : {}", job);
       return Response.ok(job).build();
     } catch (WebApplicationException e) {
@@ -337,27 +316,6 @@ public class UploadService extends BaseService {
     }
   }
 
-  @Path("/deleteTable")
-  @POST
-  @Consumes(MediaType.APPLICATION_JSON)
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response deleteTable(DeleteQueryInput input) {
-    try {
-      String deleteQuery = generateDeleteQuery(input);
-      LOG.info("deleteQuery : {}", deleteQuery);
-
-      Job job = createJob(deleteQuery, "default");
-      LOG.info("Job created for delete temp table : {} ", job);
-      return Response.ok(job).build();
-    } catch (WebApplicationException e) {
-      LOG.error(getErrorMessage(e), e);
-      throw e;
-    } catch (Throwable e) {
-      LOG.error(e.getMessage(), e);
-      throw new ServiceFormattedException(e);
-    }
-  }
-
   private String uploadIntoTable(Reader reader, String databaseName, String tempTableName) {
     try {
       String fullPath = getHiveMetaStoreLocation(databaseName, tempTableName);
@@ -388,23 +346,42 @@ public class UploadService extends BaseService {
     return ambariApi;
   }
 
-  private String generateCreateQuery(TableInfo ti) {
-    return new QueryGenerator().generateCreateQuery(ti);
+  private String generateCreateQuery(TableMeta ti) throws ServiceException {
+    CreateTableQueryGenerator createTableQueryGenerator = new CreateTableQueryGenerator(ti);
+    Optional<String> query = createTableQueryGenerator.getQuery();
+    if(query.isPresent()){
+      return query.get();
+    }else{
+      throw new ServiceException("Failed to generate create table query.");
+    }
   }
 
-  private String generateInsertFromQuery(InsertFromQueryInput input) {
-    return new QueryGenerator().generateInsertFromQuery(input);
+  private String generateInsertFromQuery(InsertFromQueryInput input) throws ServiceException {
+    InsertFromQueryGenerator queryGenerator = new InsertFromQueryGenerator(input);
+    Optional<String> query = queryGenerator.getQuery();
+    if(query.isPresent()){
+      return query.get();
+    }else{
+      throw new ServiceException("Failed to generate Insert From Query.");
+    }
   }
 
-  private String generateDeleteQuery(DeleteQueryInput deleteQueryInput) {
-    return new QueryGenerator().generateDropTableQuery(deleteQueryInput);
+  private String generateDeleteQuery(DeleteQueryInput deleteQueryInput) throws ServiceException {
+    DeleteTableQueryGenerator deleteQuery = new DeleteTableQueryGenerator(deleteQueryInput.getDatabase(), deleteQueryInput.getTable());
+    Optional<String> query = deleteQuery.getQuery();
+    if(query.isPresent()){
+      return query.get();
+    }else{
+      throw new ServiceException("Failed to generate delete table query.");
+    }
   }
 
-  private Job createJob(String query, String databaseName) throws Throwable{
+  private Job createJob(String query, String databaseName, String jobTitle) throws Throwable{
     Map jobInfo = new HashMap<>();
-    jobInfo.put("title", "Internal Job");
+    jobInfo.put("title", jobTitle);
     jobInfo.put("forcedContent", query);
     jobInfo.put("dataBase", databaseName);
+    jobInfo.put("referrer", JobImpl.REFERRER.INTERNAL.name());
 
     Job job = new JobImpl(jobInfo);
     LOG.info("creating job : {}", job);
@@ -523,7 +500,7 @@ public class UploadService extends BaseService {
     String inputFileType,   // the format of the file uploaded. CSV/JSON etc.
     String tableName,
     String databaseName,
-    List<ColumnDescriptionImpl> header,
+    List<ColumnInfo> header,
     boolean containsEndlines,
     CSVParams csvParams
   ) throws Exception {

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/Parser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/Parser.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/Parser.java
index a012463..5586f8c 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/Parser.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/Parser.java
@@ -20,6 +20,7 @@ package org.apache.ambari.view.hive20.resources.uploads.parsers;
 
 import org.apache.ambari.view.hive20.client.ColumnDescription;
 import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;
 import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -74,7 +75,7 @@ public abstract class Parser implements IParser {
     LOG.info("generating preview for : {}", this.parseOptions );
 
     ArrayList<Row> previewRows;
-    List<ColumnDescription> header;
+    List<ColumnInfo> header;
 
     try {
       numberOfPreviewRows = (Integer) parseOptions.getOption(ParseOptions.OPTIONS_NUMBER_OF_PREVIEW_ROWS);
@@ -137,11 +138,11 @@ public abstract class Parser implements IParser {
     // find data types.
     header = generateHeader(headerRow,previewRows,numOfCols);
 
-    return new PreviewData(header,previewRows);
+    return new PreviewData(header, previewRows);
   }
 
-  private List<ColumnDescription> generateHeader(Row headerRow,List<Row> previewRows, int numOfCols) {
-    List<ColumnDescription> header = new ArrayList<>();
+  private List<ColumnInfo> generateHeader(Row headerRow, List<Row> previewRows, int numOfCols) {
+    List<ColumnInfo> header = new ArrayList<>();
 
     for (int colNum = 0; colNum < numOfCols; colNum++) {
       ColumnDescription.DataTypes type = getLikelyDataType(previewRows,colNum);
@@ -151,7 +152,7 @@ public abstract class Parser implements IParser {
       if (null != headerRow)
         colName = (String) headerRow.getRow()[colNum];
 
-      ColumnDescription cd = new ColumnDescriptionImpl(colName, type.toString(), colNum);
+      ColumnInfo cd = new ColumnInfo(colName, type.toString());
       header.add(cd);
     }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/PreviewData.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/PreviewData.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/PreviewData.java
index 50af529..dd31457 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/PreviewData.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/parsers/PreviewData.java
@@ -20,6 +20,7 @@ package org.apache.ambari.view.hive20.resources.uploads.parsers;
 
 import org.apache.ambari.view.hive20.client.ColumnDescription;
 import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;
 
 import java.util.List;
 
@@ -27,22 +28,22 @@ import java.util.List;
  * Encapsulating preview data from parser.
  */
 public class PreviewData {
-  private List<ColumnDescription> header;
+  private List<ColumnInfo> header;
   private List<Row> previewRows;
 
   public PreviewData() {
   }
 
-  public PreviewData(List<ColumnDescription> header, List<Row> previewRows) {
+  public PreviewData(List<ColumnInfo> header, List<Row> previewRows) {
     this.header = header;
     this.previewRows = previewRows;
   }
 
-  public List<ColumnDescription> getHeader() {
+  public List<ColumnInfo> getHeader() {
     return header;
   }
 
-  public void setHeader(List<ColumnDescription> header) {
+  public void setHeader(List<ColumnInfo> header) {
     this.header = header;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/InsertFromQueryInput.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/InsertFromQueryInput.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/InsertFromQueryInput.java
index 027baff..4ff61b4 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/InsertFromQueryInput.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/InsertFromQueryInput.java
@@ -18,6 +18,7 @@
 
 package org.apache.ambari.view.hive20.resources.uploads.query;
 
+import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;
 import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl;
 
 import java.util.List;
@@ -27,13 +28,14 @@ public class InsertFromQueryInput {
   private String fromTable;
   private String toDatabase;
   private String toTable;
-  private List<ColumnDescriptionImpl> header;
+  private List<ColumnInfo> header;
   private Boolean unhexInsert = Boolean.FALSE;
 
   public InsertFromQueryInput() {
   }
 
-  public InsertFromQueryInput(String fromDatabase, String fromTable, String toDatabase, String toTable, List<ColumnDescriptionImpl> header, Boolean unhexInsert) {
+  public InsertFromQueryInput(String fromDatabase, String fromTable, String toDatabase, String toTable,
+                              List<ColumnInfo> header, Boolean unhexInsert) {
     this.fromDatabase = fromDatabase;
     this.fromTable = fromTable;
     this.toDatabase = toDatabase;
@@ -42,11 +44,11 @@ public class InsertFromQueryInput {
     this.unhexInsert = unhexInsert;
   }
 
-  public List<ColumnDescriptionImpl> getHeader() {
+  public List<ColumnInfo> getHeader() {
     return header;
   }
 
-  public void setHeader(List<ColumnDescriptionImpl> header) {
+  public void setHeader(List<ColumnInfo> header) {
     this.header = header;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/QueryGenerator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/QueryGenerator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/QueryGenerator.java
deleted file mode 100644
index dda178d..0000000
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/QueryGenerator.java
+++ /dev/null
@@ -1,143 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive20.resources.uploads.query;
-
-import org.apache.ambari.view.hive.resources.uploads.query.RowFormat;
-import org.apache.ambari.view.hive20.client.ColumnDescription;
-import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl;
-import org.apache.ambari.view.hive20.resources.uploads.HiveFileType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.List;
-
-/**
- * generates the sql query from given data
- */
-public class QueryGenerator {
-  protected final static Logger LOG =
-          LoggerFactory.getLogger(QueryGenerator.class);
-
-  public String generateCreateQuery(TableInfo tableInfo) {
-    String tableName = tableInfo.getTableName();
-    List<ColumnDescriptionImpl> cdList = tableInfo.getHeader();
-
-    StringBuilder query = new StringBuilder();
-    query.append("CREATE TABLE ").append(tableName).append(" (");
-    Collections.sort(cdList, new Comparator<ColumnDescription>() {
-      @Override
-      public int compare(ColumnDescription o1, ColumnDescription o2) {
-        return o1.getPosition() - o2.getPosition();
-      }
-    });
-
-    boolean first = true;
-    for (ColumnDescriptionImpl cd : cdList) {
-      if (first) {
-        first = false;
-      } else {
-        query.append(", ");
-      }
-
-      query.append(cd.getName()).append(" ").append(cd.getType());
-      if (cd.getPrecision() != null) {
-        query.append("(").append(cd.getPrecision());
-        if (cd.getScale() != null) {
-          query.append(",").append(cd.getScale());
-        }
-        query.append(")");
-      }
-
-    }
-
-    query.append(")");
-
-    if(tableInfo.getHiveFileType().equals(HiveFileType.TEXTFILE)) {
-      query.append(getRowFormatQuery(tableInfo.getRowFormat()));
-    }
-    query.append(" STORED AS ").append(tableInfo.getHiveFileType().toString());
-    String queryString = query.append(";").toString();
-    LOG.info("Query : {}", queryString);
-    return queryString;
-  }
-
-  private String getRowFormatQuery(RowFormat rowFormat) {
-    StringBuilder sb = new StringBuilder();
-    if(rowFormat != null) {
-      sb.append(" ROW FORMAT DELIMITED");
-      if(rowFormat.getFieldsTerminatedBy() != null ){
-        sb.append(" FIELDS TERMINATED BY '").append(rowFormat.getFieldsTerminatedBy()).append('\'');
-      }
-      if(rowFormat.getEscapedBy() != null){
-        String escape = String.valueOf(rowFormat.getEscapedBy());
-        if(rowFormat.getEscapedBy() == '\\'){
-          escape = escape + '\\'; // special handling of slash as its escape char for strings in hive as well.
-        }
-        sb.append(" ESCAPED BY '").append(escape).append('\'');
-      }
-    }
-
-    return sb.toString();
-  }
-
-  public String generateInsertFromQuery(InsertFromQueryInput ifqi) {
-    StringBuilder insertQuery = new StringBuilder("INSERT INTO TABLE ").append(ifqi.getToDatabase()).append(".")
-                                .append(ifqi.getToTable()).append(" SELECT ");
-
-    boolean first = true;
-    for(ColumnDescriptionImpl column : ifqi.getHeader()){
-      String type = column.getType();
-      boolean unhex = ifqi.getUnhexInsert() && (
-        ColumnDescription.DataTypes.STRING.toString().equals(type)
-          || ColumnDescription.DataTypes.VARCHAR.toString().equals(type)
-          || ColumnDescription.DataTypes.CHAR.toString().equals(type)
-      );
-
-      if(!first){
-        insertQuery.append(", ");
-      }
-
-      if(unhex) {
-        insertQuery.append("UNHEX(");
-      }
-
-      insertQuery.append(column.getName());
-
-      if(unhex) {
-        insertQuery.append(")");
-      }
-
-      first = false;
-    }
-
-    insertQuery.append(" FROM ").append(ifqi.getFromDatabase()).append(".").append(ifqi.getFromTable()).append(";");
-    String query = insertQuery.toString();
-    LOG.info("Insert Query : {}", query);
-    return query;
-  }
-
-  public String generateDropTableQuery(DeleteQueryInput deleteQueryInput) {
-    String dropQuery = new StringBuilder("DROP TABLE ").append(deleteQueryInput.getDatabase())
-                      .append(".").append(deleteQueryInput.getTable()).append(";").toString();
-    LOG.info("Drop Query : {}", dropQuery);
-    return dropQuery;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/RowFormat.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/RowFormat.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/RowFormat.java
deleted file mode 100644
index 4c1cb2b..0000000
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/RowFormat.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.resources.uploads.query;
-
-public class RowFormat {
-  private Character fieldsTerminatedBy;
-  private Character escapedBy;
-
-  private RowFormat() {
-  }
-
-  public RowFormat(Character fieldsTerminatedBy, Character escapedBy) {
-    this.fieldsTerminatedBy = fieldsTerminatedBy;
-    this.escapedBy = escapedBy;
-  }
-
-  public Character getFieldsTerminatedBy() {
-    return fieldsTerminatedBy;
-  }
-
-  public void setFieldsTerminatedBy(Character fieldsTerminatedBy) {
-    this.fieldsTerminatedBy = fieldsTerminatedBy;
-  }
-
-  public Character getEscapedBy() {
-    return escapedBy;
-  }
-
-  public void setEscapedBy(Character escapedBy) {
-    this.escapedBy = escapedBy;
-  }
-
-  @Override
-  public String toString() {
-    StringBuilder sb = new StringBuilder("RowFormat{ fieldsTerminatedBy='");
-    sb.append(fieldsTerminatedBy).append( '\'').append(", escapedBy='")
-      .append(escapedBy).append("\'}");
-
-    return sb.toString();
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/TableInfo.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/TableInfo.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/TableInfo.java
deleted file mode 100644
index bb39271..0000000
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/uploads/query/TableInfo.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive20.resources.uploads.query;
-
-import org.apache.ambari.view.hive.resources.uploads.query.RowFormat;
-import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl;
-import org.apache.ambari.view.hive20.resources.uploads.HiveFileType;
-
-import java.io.Serializable;
-import java.util.List;
-
-/**
- * used as input in Query generation
- */
-public class TableInfo implements Serializable{
-  private String tableName;
-  private String databaseName;
-  private List<ColumnDescriptionImpl> header;
-  private HiveFileType hiveFileType;
-
-  private RowFormat rowFormat;
-
-  public String getTableName() {
-    return tableName;
-  }
-
-  public void setTableName(String tableName) {
-    this.tableName = tableName;
-  }
-
-  public String getDatabaseName() {
-    return databaseName;
-  }
-
-  public void setDatabaseName(String databaseName) {
-    this.databaseName = databaseName;
-  }
-
-  public List<ColumnDescriptionImpl> getHeader() {
-    return header;
-  }
-
-  public void setHeader(List<ColumnDescriptionImpl> header) {
-    this.header = header;
-  }
-
-  public HiveFileType getHiveFileType() {
-    return hiveFileType;
-  }
-
-  public void setHiveFileType(HiveFileType hiveFileType) {
-    this.hiveFileType = hiveFileType;
-  }
-
-  public RowFormat getRowFormat() {
-    return rowFormat;
-  }
-
-  public void setRowFormat(RowFormat rowFormat) {
-    this.rowFormat = rowFormat;
-  }
-
-  public TableInfo(String databaseName, String tableName, List<ColumnDescriptionImpl> header, HiveFileType hiveFileType, RowFormat rowFormat) {
-    this.databaseName = databaseName;
-    this.tableName = tableName;
-    this.header = header;
-    this.hiveFileType = hiveFileType;
-    this.rowFormat = rowFormat;
-  }
-
-  public TableInfo(TableInfo tableInfo) {
-    this.tableName = tableInfo.tableName;
-    this.databaseName = tableInfo.databaseName;
-    this.header = tableInfo.header;
-    this.hiveFileType = tableInfo.hiveFileType;
-    this.rowFormat = tableInfo.rowFormat;
-  }
-
-  public TableInfo() {
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/app/adapters/file-uploader.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/adapters/file-uploader.js b/contrib/views/hive20/src/main/resources/ui/app/adapters/file-uploader.js
new file mode 100644
index 0000000..34f52b1
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/adapters/file-uploader.js
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import EmberUploader from 'ember-uploader';
+
/**
 * Uploader that PUTs files to the Ambari view backend. Every request carries
 * the "X-Requested-By" header, which Ambari requires as CSRF protection on
 * mutating requests.
 */
export default EmberUploader.Uploader.extend({
  method: 'PUT',
  ajaxSettings: {
    headers: {
      "X-Requested-By": "ambari"
    }
  }
});

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/app/adapters/upload-table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/adapters/upload-table.js b/contrib/views/hive20/src/main/resources/ui/app/adapters/upload-table.js
new file mode 100644
index 0000000..3bfe15b
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/adapters/upload-table.js
@@ -0,0 +1,93 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import ApplicationAdapter from './application';
+import FileUploader from './file-uploader';
+
/**
 * Adapter for the upload-table feature. Talks to the view's "/upload/*"
 * endpoints (file upload, table create/insert, HDFS preview/upload) and
 * delegates table deletion to the tableOperations service.
 */
export default ApplicationAdapter.extend({
  tableOperations: Ember.inject.service(),

  buildURL: function () {
    return this._super(...arguments);
  },

  /**
   * Builds the endpoint URL for an upload sub-resource,
   * e.g. buildUploadURL("createTable") -> ".../upload/createTable".
   */
  buildUploadURL: function (path) {
    return this.buildURL() + "/upload/" + path;
  },

  /**
   * Uploads the first of `files` to the given upload endpoint.
   * Returns the uploader's promise, or undefined when `files` is empty.
   */
  uploadFiles: function (path, files, extras) {
    const uploadUrl = this.buildUploadURL(path);

    console.log("uploader : uploadURL : ", uploadUrl, " extras : ", extras, "files : ", files); // fixed "uplaoder" typo

    // Drop the JSON Content-Type so the browser can set the
    // multipart/form-data boundary for the file upload itself.
    const hdrs = Ember.$.extend(true, {}, this.get('headers'));
    delete hdrs['Content-Type'];
    const uploader = FileUploader.create({
      headers: hdrs,
      url: uploadUrl
    });

    if (!Ember.isEmpty(files)) {
      return uploader.upload(files[0], extras);
    }
  },

  createTable: function (tableData) {
    console.log("creating table with data :", tableData);
    return this.doPost("createTable", tableData);
  },

  insertIntoTable: function (insertData) {
    console.log("inserting into table with data : ", insertData);
    return this.doPost("insertIntoTable", insertData);
  },

  deleteTable: function (deleteData) {
    console.log("delete table with info : ", deleteData);
    return this.get('tableOperations').deleteTable(deleteData.database, deleteData.table);
  },

  /**
   * POSTs `inputData` as JSON to the named upload endpoint.
   * Wraps the jQuery deferred in an RSVP promise so callers get
   * standard then/catch semantics.
   */
  doPost: function (path, inputData) {
    const self = this;
    return new Ember.RSVP.Promise(function (resolve, reject) {
      Ember.$.ajax({
        url: self.buildUploadURL(path),
        type: 'post',
        data: JSON.stringify(inputData),
        headers: self.get('headers'),
        dataType: 'json'
      }).done(function (data) {
        resolve(data);
      }).fail(function (error) {
        reject(error);
      });
    });
  },

  previewFromHDFS: function (previewFromHdfsData) {
    console.log("preview from hdfs with info : ", previewFromHdfsData);
    return this.doPost("previewFromHdfs", previewFromHdfsData); // added missing semicolon
  },

  uploadFromHDFS: function (uploadFromHdfsData) {
    console.log("upload from hdfs with info : ", uploadFromHdfsData);
    return this.doPost("uploadFromHDFS", uploadFromHdfsData); // added missing semicolon
  }
});

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/app/components/csv-format-params.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/csv-format-params.js b/contrib/views/hive20/src/main/resources/ui/app/components/csv-format-params.js
new file mode 100644
index 0000000..7a14ba8
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/csv-format-params.js
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import Helpers from '../configs/helpers';
+
/**
 * Component for choosing the input file type (CSV/JSON/XML) and, for CSV,
 * the delimiter, quote and escape characters. Writes its selections into
 * the bound `fileFormatInfo` object.
 */
export default Ember.Component.extend({
  showCSVFormatInput: true,
  DEFAULT_CSV_DELIMITER: ',',
  DEFAULT_CSV_QUOTE: '"',
  DEFAULT_CSV_ESCAPE: '\\',
  DEFAULT_FILE_TYPE: 'CSV',
  isFirstRowHeader: false, // is first row  header
  csvParams: Ember.Object.create(),
  inputFileTypes: Ember.computed(function () {
    return Helpers.getUploadFileTypes();
  }),
  inputFileTypeCSV: Ember.computed.equal('fileFormatInfo.inputFileType.id', "CSV"),

  terminationChars: Ember.computed(function () {
    return Helpers.getAllTerminationCharacters();
  }),

  init: function () {
    this._super(...arguments);
    this.set('fileFormatInfo.csvParams.csvDelimiter', this.get("terminationChars").findBy("name", this.get('DEFAULT_CSV_DELIMITER')));
    this.set('fileFormatInfo.csvParams.csvQuote', this.get("terminationChars").findBy("name", this.get('DEFAULT_CSV_QUOTE')));
    this.set('fileFormatInfo.csvParams.csvEscape', this.get("terminationChars").findBy("name", this.get('DEFAULT_CSV_ESCAPE')));
    // BUG FIX: DEFAULT_FILE_TYPE was passed as a stray third argument to
    // `set` instead of as findBy's value, so findBy("name") matched nothing
    // and the default input file type was never initialized to CSV.
    this.set("fileFormatInfo.inputFileType", this.get("inputFileTypes").findBy("name", this.get('DEFAULT_FILE_TYPE')));
  },

  actions: {
    toggleCSVFormat: function () {
      console.log("inside toggleCSVFormat");
      this.toggleProperty('showCSVFormatInput');
    },
    // NOTE(review): action name keeps the original "Delimter" spelling because
    // templates reference actions by name — confirm before renaming.
    clearColumnDelimter: function () {
      this.set('fileFormatInfo.csvParams.csvDelimiter');
    },
    csvDelimiterSelected: function (terminator) {
      this.set('fileFormatInfo.csvParams.csvDelimiter', terminator);
    },
    csvEscapeSelected: function (terminator) {
      this.set('fileFormatInfo.csvParams.csvEscape', terminator);
    },
    clearEscapeCharacter: function () {
      this.set('fileFormatInfo.csvParams.csvEscape');
    },
    csvQuoteSelected: function (terminator) {
      this.set('fileFormatInfo.csvParams.csvQuote', terminator);
    },
    clearCsvQuote: function () {
      this.set('fileFormatInfo.csvParams.csvQuote');
    },
    inputFileTypeSelected: function (fileType) {
      this.set("fileFormatInfo.inputFileType", fileType);
    },
    clearInputFileType: function () {
      this.set("fileFormatInfo.inputFileType");
    },
  }
});

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/app/components/radio-button.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/radio-button.js b/contrib/views/hive20/src/main/resources/ui/app/components/radio-button.js
new file mode 100644
index 0000000..066168c
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/radio-button.js
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
/**
 * Radio-button component bound to a shared `checked` property: the button
 * renders checked when its `value` equals `checked`, and clicking it writes
 * its `value` back into `checked`.
 *
 * Rewritten with Ember.computed/Ember.observer instead of the
 * prototype-extension `.property()`/`.observes()` style, matching the other
 * components added in this change.
 */
export default Ember.Component.extend({
  tagName: 'input',
  type: 'radio',
  attributeBindings: ['type', 'htmlChecked:checked', 'value', 'name', 'disabled'],

  // true when this button's value matches the bound `checked` property
  htmlChecked: Ember.computed('value', 'checked', function () {
    return this.get('value') === this.get('checked');
  }),

  change: function () {
    console.log("value changed : ", this.get('value'));
    this.set('checked', this.get('value'));
  },

  // Sync the DOM checked state after the computed flips; deferred with
  // run.next so it happens after the current render pass.
  _updateElementValue: Ember.observer('htmlChecked', function () {
    Ember.run.next(this, function () {
      this.$().prop('checked', this.get('htmlChecked'));
    });
  })
});

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/app/components/simple-table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/simple-table.js b/contrib/views/hive20/src/main/resources/ui/app/components/simple-table.js
new file mode 100644
index 0000000..8828275
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/simple-table.js
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
/**
 * Bare presentational component for rendering tabular preview data;
 * all markup and behavior live in its template.
 */
export default Ember.Component.extend({});

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/app/components/upload-table-source.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/upload-table-source.js b/contrib/views/hive20/src/main/resources/ui/app/components/upload-table-source.js
new file mode 100644
index 0000000..36da77d
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/upload-table-source.js
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
/**
 * Component for picking the upload source (local file or HDFS path).
 * Shows the matching location input based on `fileInfo.uploadSource` and
 * forwards file/path selections into the bound `fileInfo` object.
 */
export default Ember.Component.extend({
  showFileSourceInput: true,
  showHdfsLocationInput: Ember.computed.equal("fileInfo.uploadSource", "hdfs"),
  showLocalLocationInput: Ember.computed.equal("fileInfo.uploadSource", "local"),
  isLocalUpload: Ember.computed.equal("fileInfo.uploadSource", "local"),

  actions: {
    toggleFileSource: function () {
      this.toggleProperty("showFileSourceInput");
    },
    closeHdfsModal: function () {
      this.set('showDirectoryViewer', false);
    },
    hdfsPathSelected: function (path) {
      this.set('fileInfo.hdfsPath', path);
      this.set('showDirectoryViewer', false);
    },

    toggleDirectoryViewer: function () {
      this.set('showDirectoryViewer', true);
    },
    onFileChanged: function (file) {
      // NOTE(review): direct index assignment bypasses Ember's array
      // observers; appears intentional since onFileChanged is sent
      // explicitly right after — confirm no template binds to files.[].
      this.get("fileInfo.files")[0] = file;
      console.log("setting files as : ", this.get("fileInfo.files")); // fixed garbled "fifilesUploadedles" log text
      this.sendAction("onFileChanged");
    },
  }
});

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/app/components/upload-table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/upload-table.js b/contrib/views/hive20/src/main/resources/ui/app/components/upload-table.js
new file mode 100644
index 0000000..29e9891
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/upload-table.js
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
/**
 * Top-level upload-table component: aggregates the file format settings,
 * the file source info and the table metadata, and forwards "preview" and
 * "createAndUpload" actions to the route with those objects bundled.
 *
 * NOTE(review): object-valued defaults declared on extend() are shared
 * across component instances in Ember — fine while only one upload form
 * exists at a time; confirm if that ever changes.
 */
export default Ember.Component.extend({
  showPreview: false,
  fileFormatInfo: Ember.Object.create({
    csvParams: Ember.Object.create(),
    inputFileType: null,
  }),
  fileInfo: Ember.Object.create({
    files: Ember.A(),
    hdfsPath: null,
    uploadSource: null,
  }),
  tableMeta: Ember.Object.create(),
  actions: {
    onFileChanged: function () {
      console.log("inside files changed");
      console.log("fileFormatInfo : ", this.get("fileFormatInfo"));
      console.log("fileInfo : ", this.get("fileInfo"));
      // fixed: logged the non-existent 'tableInfo' property (always
      // undefined); the component's property is 'tableMeta'
      console.log("tableMeta : ", this.get("tableMeta"));
      this.send("preview");
    },
    preview: function () {
      let sourceObject = Ember.Object.create();
      sourceObject.set("fileFormatInfo", this.get("fileFormatInfo"));
      sourceObject.set("fileInfo", this.get("fileInfo"));
      this.sendAction("preview", sourceObject);
      this.set("showPreview", true);
    },
    toggleShowPreview: function () {
      this.toggleProperty("showPreview");
    },
    createAndUpload: function (tableMeta) {
      this.set("tableMeta", tableMeta);
      let tableData = Ember.Object.create();
      tableData.set("fileFormatInfo", this.get("fileFormatInfo"));
      tableData.set("fileInfo", this.get("fileInfo"));
      tableData.set("tableMeta", this.get("tableMeta"));
      this.sendAction("createAndUpload", tableData);
    }
  }
});

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/app/components/validated-text-field.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/validated-text-field.js b/contrib/views/hive20/src/main/resources/ui/app/components/validated-text-field.js
new file mode 100644
index 0000000..2379a15
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/validated-text-field.js
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+import Ember from 'ember';
+
/** Example :
 * {{#validated-text-field
 * inputValue=bindedTextValue invalidClass='form-control red-border' validClass='form-control' regex="^[a-z]+$"
 * allowEmpty=false tooltip="Enter valid word" errorMessage="Please enter valid word" placeholder="Enter Word"}}
 * {{/validated-text-field}}
 *
 * Text field that re-validates `inputValue` against `regex` on every change,
 * switching between validClass/tooltip and invalidClass/errorMessage.
 */
export default Ember.Component.extend({
  classNameBindings: ['tagClassName'],
  tagClassName: false, // set to a non-false value to assign a specific class to the tag
  allowEmpty: true,
  valid: true,

  // Mark the field valid: apply validClass and show the informational tooltip.
  setValid: function () {
    this.set("valid", true);
    this.set("inputClass", this.get("validClass"));
    this.set("message", this.get("tooltip"));
  },

  // Mark the field invalid: apply invalidClass and show the error message.
  setInvalid: function () {
    this.set("valid", false);
    this.set("inputClass", this.get("invalidClass"));
    this.set("message", this.get("errorMessage"));
  },

  // Valid iff the value produces exactly one regex match; empty input is
  // valid only when allowEmpty is true. Runs once on init and on each change.
  onChangeInputValue: function () {
    const regExp = new RegExp(this.get("regex"), "g"); // fresh regex per run, so /g lastIndex never leaks
    const value = this.get("inputValue");
    if (value) {
      const matches = value.match(regExp);
      if (matches !== null && matches.length === 1) { // strict equality; was != / ==
        this.setValid();
      } else {
        this.setInvalid();
      }
    } else if (this.get("allowEmpty")) {
      this.setValid();
    } else {
      this.setInvalid();
    }
  }.observes("inputValue").on('init')
});

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/app/configs/helpers.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/configs/helpers.js b/contrib/views/hive20/src/main/resources/ui/app/configs/helpers.js
index 025caa6..ecefe55 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/configs/helpers.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/configs/helpers.js
@@ -142,10 +142,22 @@ export default Ember.Object.create({
    */
   getAllTerminationCharacters() {
     let arr = Ember.copy(nonPrintableChars);
-    for(let i=33; i < 127; i++) {
+    for (let i = 33; i < 127; i++) {
       arr.pushObject({id: i.toString(), name: String.fromCodePoint(i)});
     }
     return arr;
+  },
+
+  /**
+   * returns file types supported for upload-table feature.
+   */
+  getUploadFileTypes() {
+    let arr = [
+      {id: "CSV", name: "CSV"},
+      {id: "JSON", name: "JSON"},
+      {id: "XML", name: "XML"}
+    ];
+    return Ember.copy(arr);
   }
 
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/app/locales/en/translations.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/locales/en/translations.js b/contrib/views/hive20/src/main/resources/ui/app/locales/en/translations.js
new file mode 100644
index 0000000..b550dbe
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/locales/en/translations.js
@@ -0,0 +1,111 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
/**
 * English UI strings for the Hive view upload-table feature. Strings under
 * "errors" and "messages" contain {{placeholder}} tokens filled in by the
 * i18n layer. Exported via a named const so the table is also addressable
 * in tests; the default export is unchanged.
 */
const translations = {
  "hive": {
    "ui": {
      "fileSource": {
        'uploadFromLocal': "Upload from Local",
        'uploadFromHdfs': "Upload from HDFS",
        'selectFileType': "Select File Type",
        'fileType': "File type",
        "selectHdfsLocation": "Select HDFS Directory",
        "enterHdfsPathLabel": "Enter Hdfs Path",
        "selectLocalFileLabel": "Select Local File",
      },
      "csvFormatParams": {
        // NOTE(review): key keeps the original "Delimter" spelling; templates
        // look strings up by key, so confirm all call sites before renaming.
        'columnDelimterField': "Field Delimiter",
        'columnDelimiterTooltip': "Delimiter for the column values. Default is comma (,).", // fixed "comman" typo
        'escapeCharacterField': "Escape Character",
        // fixed: "(\)" collapsed to "()" in the rendered string because \) is
        // just ) in a double-quoted literal; \\ renders the intended backslash
        'escapeCharacterTooltip': "Escape character. Default is backslash (\\).",
        'quoteCharacterTooltip': 'Quote character. Default is double quote (").',
        'quoteCharacterField': "Quote Character",
      },
      "uploadTable": {
        'uploadProgress': "Upload Progress",
        'uploading': "Uploading..",
        'selectFromLocal': "Select from local",
        'hdfsPath': "HDFS Path",
        'selectDatabase': "Select a Database",
        'tableName': "Table name",
        'tableNameErrorMessage': "Only alphanumeric and underscore characters are allowed in table name.",
        'tableNameTooltip': "Enter valid (alphanumeric + underscore) table name.",
        'storedAs': "Stored as",
        'isFirstRowHeader': "Is first row header ?",
        'columnNameTooltip': "Enter valid (alphanumeric + underscore) column name.",
        'columnNameErrorMessage': "Only alphanumeric and underscore characters are allowed in column names.",
        'hdfsFieldTooltip': "Enter full HDFS path",
        'hdfsFieldPlaceholder': "Enter full HDFS path",
        'hdfsFieldErrorMessage': "Please enter complete path of hdfs file to upload.",
        'containsEndlines': "Contains endlines?",
        'fieldsTerminatedByField': "Fields Terminated By",
        'escapedByField': "Escape By",
        'escapedByTooltip': "Escaped By character for Hive table.",
        'fieldsTerminatedByTooltip': "Fields Terminated By character for Hive table.",
        'isFirstRowHeaderTooltip': "Check if the first row of CSV is a header.",
        'showPreview': "Preview"
      }
    },
    words: {
      temporary: "Temporary",
      actual: "Actual",
      database: "Database"
    },
    errors: {
      'no.query': "No query to process.",
      'emptyDatabase': "Please select {{ database }}.",
      'emptyTableName': "Please enter {{ tableNameField }}.",
      'illegalTableName': "Illegal {{ tableNameField }} : '{{ tableName }}'",
      'emptyIsFirstRow': "{{isFirstRowHeaderField}} cannot be null.",
      'emptyHeaders': "Headers (containing column names) cannot be null.",
      'emptyColumnName': "Column name cannot be null.",
      'illegalColumnName': "Illegal column name : '{{columnName}}' in column number {{index}}",
      'emptyHdfsPath': "HdfsPath Name cannot be null or empty.",
      'illegalHdfPath': "Illegal hdfs path : {{hdfsPath}}"
    },
    messages: {
      'generatingPreview': "Generating Preview.",
      'startingToCreateActualTable': "Creating Actual table",
      'waitingToCreateActualTable': "Waiting for creation of Actual table",
      'successfullyCreatedActualTable': "Successfully created Actual table.",
      'failedToCreateActualTable': "Failed to create Actual table.",
      'startingToCreateTemporaryTable': "Creating Temporary table.",
      'waitingToCreateTemporaryTable': "Waiting for creation of Temporary table.",
      'successfullyCreatedTemporaryTable': "Successfully created Temporary table.",
      'failedToCreateTemporaryTable': " Failed to create temporary table.",
      'deletingTable': "Deleting {{table}} table.",
      'succesfullyDeletedTable': "Successfully deleted {{ table}} table.",
      'failedToDeleteTable': "Failed to delete {{table}} table.",
      'startingToUploadFile': "Uploading file.",
      'waitingToUploadFile': "Waiting for uploading file.",
      'successfullyUploadedFile': "Successfully uploaded file.",
      'failedToUploadFile': "Failed to upload file.",
      'startingToInsertRows': "Inserting rows from temporary table to actual table.",
      'waitingToInsertRows': "Waiting for insertion of rows from temporary table to actual table.",
      'successfullyInsertedRows': "Successfully inserted rows from temporary table to actual table.",
      'failedToInsertRows': "Failed to insert rows from temporary table to actual table.",
      'startingToDeleteTemporaryTable': "Deleting temporary table.",
      'waitingToDeleteTemporaryTable': "Waiting for deletion of temporary table.",
      'successfullyDeletedTemporaryTable': "Successfully deleted temporary table",
      'manuallyDeleteTable': "You will have to manually delete the table {{databaseName}}.{{tableName}}",
      'uploadingFromHdfs': "Uploading file from HDFS ",
      'successfullyUploadedTableMessage': "Table {{tableName}} created in database {{databaseName}}",
      'successfullyUploadedTableHeader': "Uploaded Successfully"
    },
  }
};

export default translations;

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/app/models/column.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/models/column.js b/contrib/views/hive20/src/main/resources/ui/app/models/column.js
index b1fa99c..1d9ccce 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/models/column.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/models/column.js
@@ -19,8 +19,7 @@
 import Ember from 'ember';
 import datatypes from '../configs/datatypes';
 import Helper from '../configs/helpers';
-
-export default Ember.Object.extend({
+let Column = Ember.Object.extend(Ember.Copyable,{
   name: '',
   type: datatypes[0],
   precision: null,
@@ -103,5 +102,23 @@ export default Ember.Object.extend({
 
     }
     return this.get('errors.length') === 0;
+  },
+
+  copy: function(){
+    return Column.create({
+      name: this.get("name"),
+      type: this.get("type"),
+      precision: this.get("precision"),
+      scale: this.get("scale"),
+      isPartitioned: this.get("isPartitioned"),
+      isClustered: this.get("isClustered"),
+      comment: this.get("comment"),
+
+      errors: this.get("errors").copy(),
+      editing: this.get("editing"),
+    });
   }
-})
+
+});
+
+export default Column;

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/app/router.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/router.js b/contrib/views/hive20/src/main/resources/ui/app/router.js
index 34f1a66..52361ff 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/router.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/router.js
@@ -38,6 +38,7 @@ Router.map(function() {
       this.route('tables', {path: '/tables'}, function() {
         this.route('new-database');
         this.route('new');
+        this.route('upload-table');
         this.route('table', {path: '/:name'}, function() {
           this.route('rename');
           this.route('columns');
@@ -62,7 +63,6 @@ Router.map(function() {
 
     });
   });
-
 });
 
 export default Router;

http://git-wip-us.apache.org/repos/asf/ambari/blob/536192bb/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/new.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/new.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/new.js
index 6dfdf29..c8ad239 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/new.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/new.js
@@ -27,42 +27,64 @@ export default Ember.Route.extend({
     controller.set('tabs', Ember.copy(tabs));
   },
 
+  // function is used in sub-classes
+  /**
+   * @param settings
+   * @param shouldTransition : should transition to other route?
+   * @returns {Promise.<TResult>|*}
+   */
+  createTable: function(settings, shouldTransition){
+    this.controller.set('showCreateTableModal', true);
+    this.controller.set('createTableMessage', 'Submitting request to create table');
+    let databaseModel = this.controllerFor('databases.database').get('model');
+    return this.get('tableOperations').submitCreateTable(databaseModel.get('name'), settings)
+      .then((job) => {
+        console.log('Created job: ', job.get('id'));
+        this.controller.set('createTableMessage', 'Waiting for the table to be created');
+        return this.get('tableOperations').waitForJobToComplete(job.get('id'), 5 * 1000)
+          .then((status) => {
+            this.controller.set('createTableMessage', "Successfully created table");
+            Ember.run.later(() => {
+            this.controller.set('showCreateTableModal', false);
+            this.controller.set('createTableMessage');
+            this._addTableToStoreLocally(databaseModel, settings.name);
+            this._resetModelInTablesController(databaseModel.get('tables'));
+              if(shouldTransition){
+                this._transitionToCreatedTable(databaseModel.get('name'), settings.name);
+              }
+            }, 2 * 1000);
+            return Ember.RSVP.Promise.resolve(job);
+          }, (error) => {
+            // TODO: handle error
+            Ember.run.later(() => {
+              this.controller.set('showCreateTableModal', false);
+              this.controller.set('createTableMessage');
+              if(shouldTransition) {
+                this.transitionTo('databases.database', databaseModel.get('name'));
+              }
+            }, 2 * 1000);
+
+            return Ember.RSVP.Promise.reject(error);
+          });
+      }, (error) => {
+        console.log("Error encountered", error);
+        this.controller.set('showCreateTableModal', true);
+        throw error;
+      });
+  },
   actions: {
     cancel() {
       let databaseController = this.controllerFor('databases.database');
       this.transitionTo('databases.database', databaseController.get('model'));
     },
+    toggleCSVFormat: function() {
+      console.log("inside new route toggleCSVFormat");
+      this.toggleProperty('showCSVFormatInput')
+    },
 
     create(settings) {
-      this.controller.set('showCreateTableModal', true);
-      this.controller.set('createTableMessage', 'Submitting request to create table');
-      let databaseModel = this.controllerFor('databases.database').get('model');
-      this.get('tableOperations').submitCreateTable(databaseModel.get('name'), settings)
-        .then((job) => {
-          console.log('Created job: ', job.get('id'));
-          this.controller.set('createTableMessage', 'Waiting for the table to be created');
-          this.get('tableOperations').waitForJobToComplete(job.get('id'), 5 * 1000)
-            .then((status) => {
-              this.controller.set('createTableMessage', "Successfully created table");
-              Ember.run.later(() => {
-                this.controller.set('showCreateTableModal', false);
-                this.controller.set('createTableMessage');
-                this._addTableToStoreLocally(databaseModel, settings.name);
-                this._resetModelInTablesController(databaseModel.get('tables'));
-                this._transitionToCreatedTable(databaseModel.get('name'), settings.name);
-              }, 2 * 1000);
-            }, (error) => {
-              // TODO: handle error
-              Ember.run.later(() => {
-                this.controller.set('showCreateTableModal', false);
-                this.controller.set('createTableMessage');
-                this.transitionTo('databases.database', databaseModel.get('name'));
-              }, 2 * 1000);
-            });
-        }, (error) => {
-          console.log("Error encountered", error);
-          this.controller.set('showCreateTableModal', true);
-        });
+      // keep this as a function call only, as the createTable function is used in sub-classes
+      this.createTable(settings, true);
     }
   },