Posted to commits@ambari.apache.org by pa...@apache.org on 2016/06/30 11:46:21 UTC

[1/2] ambari git commit: AMBARI-17421 adding opencsv parser; added support for delimiters and endline characters in values. (Nitiraj Ratjore via pallavkul)

Repository: ambari
Updated Branches:
  refs/heads/trunk 8df1e8f01 -> 33d905062
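
For context, the new opencsv-backed parser is configured entirely through single-character options. A minimal sketch (not part of this patch) of the raw opencsv API it builds on; the three char arguments correspond to the csvDelimiter, csvQuote and csvEscape options introduced below:

    import com.opencsv.CSVParser;

    public class OpenCsvSketch {
      public static void main(String[] args) throws Exception {
        // separator, quote char and escape char are all configurable
        CSVParser parser = new CSVParser(',', '"', '\\');
        // the escape char lets a quote appear inside an unquoted field
        String[] fields = parser.parseLine("\"a,a\",b\\\"b,c");
        for (String field : fields) {
          System.out.println(field);   // prints: a,a | b"b | c
        }
      }
    }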


http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/upload-table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/upload-table.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/upload-table.js
index 6fa6cfd..51d2624 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/upload-table.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/upload-table.js
@@ -22,6 +22,44 @@ import constants from 'hive/utils/constants';
 
 
 export default Ember.Controller.extend({
+  DEFAULT_CSV_DELIMITER: ',',
+  DEFAULT_CSV_QUOTE: '"',
+  DEFAULT_CSV_ESCAPE: '\\',
+  NON_PRINTABLE_CHARS:[{"id":"0", "name":"NUL", "description":"(null)"},
+    {"id":"1", "name":"SOH", "description":"(start of heading)"},
+    {"id":"2", "name":"STX", "description":"(start of text)"},
+    {"id":"3", "name":"ETX", "description":"(end of text)"},
+    {"id":"4", "name":"EOT", "description":"(end of transmission)"},
+    {"id":"5", "name":"ENQ", "description":"(enquiry)"},
+    {"id":"6", "name":"ACK", "description":"(acknowledge)"},
+    {"id":"7", "name":"BEL", "description":"(bell)"},
+    {"id":"8", "name":"BS", "description":"(backspace)"},
+    {"id":"9", "name":"TAB", "description":"(horizontal tab)"},
+    {"id":"10", "name":"LF", "description":"(NL line feed - new line)"},
+    {"id":"11", "name":"VT", "description":"(vertical tab)"},
+    {"id":"12", "name":"FF", "description":"(NP form feed - new page)"},
+    {"id":"13", "name":"CR", "description":"(carriage return)"},
+    {"id":"14", "name":"SO", "description":"(shift out)"},
+    {"id":"15", "name":"SI", "description":"(shift in)"},
+    {"id":"16", "name":"DLE", "description":"(data link escape)"},
+    {"id":"17", "name":"DC1", "description":"(device control 1)"},
+    {"id":"18", "name":"DC2", "description":"(device control 2)"},
+    {"id":"19", "name":"DC3", "description":"(device control 3)"},
+    {"id":"20", "name":"DC4", "description":"(device control 4)"},
+    {"id":"21", "name":"NAK", "description":"(negative ackowledge)"},
+    {"id":"22", "name":"SYN", "description":"(synchronous idle)"},
+    {"id":"23", "name":"ETB", "description":"(end of trans. block)"},
+    {"id":"24", "name":"CAN", "description":"(cancel)"},
+    {"id":"25", "name":"EM", "description":"(end of medium)"},
+    {"id":"26", "name":"SUB", "description":"(substitute)"},
+    {"id":"27", "name":"ESC", "description":"(escape)"},
+    {"id":"28", "name":"FS", "description":"(file separator)"},
+    {"id":"29", "name":"GS", "description":"(group separator)"},
+    {"id":"30", "name":"RS", "description":"(record separator)"},
+    {"id":"31", "name":"US", "description":"(unit separator)"},
+    {"id":"32", "name":"Space", "description":""},
+    {"id":"127", "name":"DEL", "description":""}
+  ],
   COLUMN_NAME_REGEX: "^[a-zA-Z]{1}[a-zA-Z0-9_]*$",
   TABLE_NAME_REGEX: "^[a-zA-Z]{1}[a-zA-Z0-9_]*$",
   HDFS_PATH_REGEX: "^[/]{1}.+",  // unix path allows everything but here we have to mention full path so starts with /
@@ -48,38 +86,22 @@ export default Ember.Controller.extend({
   uploadProgressInfos : [],
   DEFAULT_DB_NAME : 'default',
   showPreview : false,
-  onChangeUploadSource : function(){
-    this.clearFields();
-  }.observes("uploadSource"),
-  setDefaultDB : function(){
-    var self = this;
-    var defaultDatabase = this.get('databases').find(
-      function(item,index){
-        if(item.id == self.DEFAULT_DB_NAME )
-          return true;
-      }
-    );
-
-    console.log("setting the initial database to : " + defaultDatabase);
-    self.set("selectedDatabase",defaultDatabase);
-  },
-  init: function() {
-    this.setDefaultDB();
-  },
-  uploadProgressInfo : Ember.computed("uploadProgressInfos.[]",function(){
-    var info = "";
-    for( var i = 0 ; i < this.get('uploadProgressInfos').length ; i++)
-        info += this.get('uploadProgressInfos').objectAt(i);
-
-    return new Ember.Handlebars.SafeString(info);
-  }),
+  containsEndlines: false,
   inputFileTypes :[
     {id : "CSV", name : "CSV"},
     {id : "JSON", name : "JSON"},
     {id : "XML", name : "XML"}
   ],
-  inputFileType : {id : "CSV", name : "CSV"},
+  inputFileType: null,
   inputFileTypeCSV : Ember.computed.equal('inputFileType.id',"CSV"),
+  storedAsTextFile : Ember.computed.equal("selectedFileType","TEXTFILE"),
+  storedAsNotTextFile : Ember.computed.not("storedAsTextFile"),
+  csvDelimiter: null,
+  csvQuote : null,
+  csvEscape : null,
+  asciiList:[],
+  fieldsTerminatedBy: null,
+  escapedBy: null,
   fileTypes:[
     "SEQUENCEFILE",
     "TEXTFILE"    ,
@@ -87,10 +109,13 @@ export default Ember.Controller.extend({
     "ORC"         ,
     "PARQUET"     ,
     "AVRO"
-    //,
-    //"INPUTFORMAT"  -- not supported as of now.
   ],
-  selectedFileType: "ORC",
+  selectedFileType: null,
+  onChangeSelectedFileType: function(){
+    if(this.get('selectedFileType') === this.get('fileTypes')[1] && this.get('containsEndlines') === true){
+      this.set('containsEndlines', false);
+    }
+  }.observes("selectedFileType", "containsEndlines"),
   dataTypes: [
     "TINYINT", //
     "SMALLINT", //
@@ -107,9 +132,61 @@ export default Ember.Controller.extend({
     "VARCHAR", // -- (Note: Available in Hive 0.12.0 and later)
     "CHAR" // -- (Note: Available in Hive 0.13.0 and later)
   ],
+  setDefaultDB : function(){
+    var self = this;
+    var defaultDatabase = this.get('databases').find(
+      function(item,index){
+        if(item.id == self.DEFAULT_DB_NAME )
+          return true;
+      }
+    );
+
+    console.log("setting the initial database to : " + defaultDatabase);
+    self.set("selectedDatabase",defaultDatabase);
+  },
+  init: function () {
+    this.setDefaultDB();
+    this.fillAsciiList();
+    this.set("selectedFileType", this.get("fileTypes")[3]);
+    this.set("inputFileType", this.get("inputFileTypes")[0]);
+  },
+  onChangeUploadSource : function(){
+    this.clearFields();
+  }.observes("uploadSource"),
+  fillAsciiList: function(){
+    var list = this.get('asciiList');
+    list.push({"id": -1, "name": ""});
+    var nonPrintable = this.get('NON_PRINTABLE_CHARS');
+    for( var i = 0 ; i <= 127 ; i++ ){
+      var charInfo = nonPrintable.find(function(item){
+        return item.id == i;
+      });
+      if(!charInfo){
+        charInfo = {"id": i, "name": String.fromCodePoint(i), "description":"" };
+      }
+      var option = {"id": i, "name": charInfo.id + "    " + charInfo.name + charInfo.description};
+      list.push(option);
+      if(i === 44){
+        this.set("csvDelimiter", option);
+      }
+      else if(i === 34){
+        this.set("csvQuote", option);
+      }
+      else if(i === 92){
+        this.set("csvEscape", option);
+      }
+    }
+  },
+  uploadProgressInfo : Ember.computed("uploadProgressInfos.[]",function(){
+    var info = "";
+    for( var i = 0 ; i < this.get('uploadProgressInfos').length ; i++)
+      info += this.get('uploadProgressInfos').objectAt(i);
+
+    return new Ember.Handlebars.SafeString(info);
+  }),
   _setHeaderElements : function(header,valueArray){
     header.forEach(function (item, index) {
-      Ember.set(item, 'name',  valueArray.objectAt(index));
+      Ember.set(item, 'name',  valueArray[index]);
     }, this);
   },
   isFirstRowHeaderDidChange: function () {
@@ -158,8 +235,9 @@ export default Ember.Controller.extend({
     this.set("hdfsPath");
     this.set("header");
     this.set("rows");
+    this.set("escapedBy");
+    this.set("fieldsTerminatedBy");
     this.set("error");
-    this.set('isFirstRowHeader',false);
     this.set('files');
     this.set("firstRow");
     this.set("selectedDatabase",null);
@@ -215,20 +293,56 @@ export default Ember.Controller.extend({
   uploadForPreview: function (files) {
     console.log("uploaderForPreview called.");
     var self = this;
+    var csvParams = this.getCSVParams();
+
     return this.get('uploader').uploadFiles('preview', files, {
       "isFirstRowHeader": self.get("isFirstRowHeader"),
-      "inputFileType": self.get("inputFileType").id
+      "inputFileType": self.get("inputFileType").id,
+      "csvDelimiter": csvParams.csvDelimiter,
+      "csvEscape": csvParams.csvEscape,
+      "csvQuote": csvParams.csvQuote
     });
   },
 
+  getAsciiChar : function(key){
+    if(!key){
+      return null;
+    }
+
+    var value = this.get(key);
+    if(value && value.id != -1) {
+      return String.fromCharCode(value.id);
+    }else{
+      return null;
+    }
+  },
+  getCSVParams : function(){
+    var csvd = this.getAsciiChar('csvDelimiter');
+    if(!csvd && csvd != 0) csvd = this.get('DEFAULT_CSV_DELIMITER');
+
+    var csvq = this.getAsciiChar('csvQuote');
+    if(!csvq && csvq != 0) csvq = this.get('DEFAULT_CSV_QUOTE');
+
+    var csve = this.getAsciiChar('csvEscape');
+    if(!csve && csve != 0) csve = this.get('DEFAULT_CSV_ESCAPE');
+
+    return {"csvDelimiter": csvd, "csvQuote" : csvq, "csvEscape": csve};
+  },
+
   uploadForPreviewFromHDFS: function () {
     console.log("uploadForPreviewFromHDFS called.");
+    var self = this;
     var hdfsPath = this.get("hdfsPath");
     this.validateHDFSPath(hdfsPath);
+    var csvParams = this.getCSVParams();
+
     return this.get('uploader').previewFromHDFS({
       "isFirstRowHeader": this.get("isFirstRowHeader"),
       "inputFileType": this.get("inputFileType").id,
-      "hdfsPath": hdfsPath
+      "hdfsPath": hdfsPath,
+      "csvDelimiter": csvParams.csvDelimiter,
+      "csvEscape": csvParams.csvEscape ,
+      "csvQuote": csvParams.csvQuote
     });
   },
 
@@ -273,16 +387,25 @@ export default Ember.Controller.extend({
     console.log('inside previewTable');
     var self = this;
     var defaultColumnNames = data.header.map(function(item,index){
-      return self.COLUMN_NAME_PREFIX + index;
+      return self.COLUMN_NAME_PREFIX + (index + 1);
     });
     this.set("defaultColumnNames",defaultColumnNames);
     this.set("header", data.header);
-    this.set("firstRow", data.rows[0].row);
     this.set('isFirstRowHeader', data.isFirstRowHeader);
     this.set('tableName', data.tableName);
+    var firstRow = null;
     if (data.isFirstRowHeader == true) {
-      data.rows = data.rows.slice(1);
+      firstRow = data.header.map(function(columnDesc){
+        return columnDesc.name;
+      });
+    }else {
+      if(data.rows.length > 0){
+        firstRow = data.rows[0].row;
+      }else{
+        firstRow = [];
+      }
     }
+    this.set("firstRow", firstRow);
     this.set("rows", data.rows);
   },
 
@@ -302,6 +425,7 @@ export default Ember.Controller.extend({
 
   createActualTable: function () {
     console.log("createActualTable");
+    var self = this;
     this.pushUploadProgressInfos(this.formatMessage('hive.messages.startingToCreateActualTable'));
     var headers = this.get('header');
     var selectedDatabase = this.get('selectedDatabase');
@@ -309,7 +433,7 @@ export default Ember.Controller.extend({
       throw new Error(this.translate('hive.errors.emptyDatabase', {database : this.translate("hive.words.database")}));
     }
 
-    this.set('databaseName', this.get('selectedDatabase').get('name'));
+    this.set('databaseName', this.get('selectedDatabase.id'));
     var databaseName = this.get('databaseName');
     var tableName = this.get('tableName');
     var isFirstRowHeader = this.get('isFirstRowHeader');
@@ -317,16 +441,21 @@ export default Ember.Controller.extend({
 
     this.validateInput(headers,tableName,databaseName,isFirstRowHeader);
     this.showUploadModal();
-
+    var rowFormat = this.getRowFormat();
     return this.get('uploader').createTable({
       "isFirstRowHeader": isFirstRowHeader,
       "header": headers,
       "tableName": tableName,
       "databaseName": databaseName,
-      "fileType":filetype
+      "hiveFileType":filetype,
+      "rowFormat": { "fieldsTerminatedBy" : rowFormat.fieldsTerminatedBy, "escapedBy" : rowFormat.escapedBy}
     });
   },
-
+  getRowFormat : function(){
+    var fieldsTerminatedBy = this.getAsciiChar('fieldsTerminatedBy');
+    var escapedBy = this.getAsciiChar('escapedBy');
+    return {"fieldsTerminatedBy": fieldsTerminatedBy, "escapedBy" : escapedBy};
+  },
   waitForCreateActualTable: function (jobId) {
     console.log("waitForCreateActualTable");
     this.popUploadProgressInfos();
@@ -338,31 +467,39 @@ export default Ember.Controller.extend({
 
     return p;
   },
-
   onCreateActualTableSuccess: function () {
     console.log("onCreateTableSuccess");
     this.popUploadProgressInfos();
     this.pushUploadProgressInfos(this.formatMessage('hive.messages.successfullyCreatedActualTable'));
   },
-
   onCreateActualTableFailure: function (error) {
     console.log("onCreateActualTableFailure");
     this.popUploadProgressInfos();
     this.pushUploadProgressInfos(this.formatMessage('hive.messages.failedToCreateActualTable'));
     this.setError(error);
   },
-
   createTempTable: function () {
+    var self = this;
     console.log("createTempTable");
     this.pushUploadProgressInfos(this.formatMessage('hive.messages.startingToCreateTemporaryTable'));
     var tempTableName = this.generateTempTableName();
     this.set('tempTableName', tempTableName);
+
+    var headers = this.get("header");
+    if(this.get("containsEndlines")){
+      headers = this.get("header").map(function(item){
+        var header = JSON.parse(JSON.stringify(item));
+        header.type = "STRING";
+        return header;
+      });
+    }
     return this.get('uploader').createTable({
       "isFirstRowHeader": this.get("isFirstRowHeader"),
-      "header": this.get("header"),
+      "header": headers,
       "tableName": tempTableName,
       "databaseName": this.get('databaseName'),
-      "fileType": "TEXTFILE"
+      "hiveFileType":"TEXTFILE",
+      "rowFormat": { "fieldsTerminatedBy" : parseInt('1', 10), "escapedBy" : null}
     });
   },
 
@@ -398,9 +535,9 @@ export default Ember.Controller.extend({
     var self = this;
     this.pushUploadProgressInfos(this.formatMessage('hive.messages.deletingTable',{table:tableLabel}));
 
-    return this.deleteTable(databaseName, tableName).then(function (data) {
+    return this.deleteTable(databaseName, tableName).then(function (job) {
       return new Ember.RSVP.Promise(function (resolve, reject) {
-        self.waitForJobStatus(data.jobId, resolve, reject);
+        self.waitForJobStatus(job.id, resolve, reject);
       });
     }).then(function () {
       self.popUploadProgressInfos();
@@ -500,7 +637,9 @@ export default Ember.Controller.extend({
       "fromDatabase": this.get("databaseName"),
       "fromTable": this.get("tempTableName"),
       "toDatabase": this.get("databaseName"),
-      "toTable": this.get("tableName")
+      "toTable": this.get("tableName"),
+      "header": this.get("header"),
+      "unhexInsert": this.get("containsEndlines")
     });
   },
 
@@ -543,7 +682,6 @@ export default Ember.Controller.extend({
       this.get("tempTableName")
     );
   },
-
   waitForDeleteTempTable: function (jobId) {
     console.log("waitForDeleteTempTable");
     this.popUploadProgressInfos();
@@ -555,20 +693,23 @@ export default Ember.Controller.extend({
 
     return p;
   },
-
   onDeleteTempTableSuccess: function () {
     console.log("onDeleteTempTableSuccess");
     this.popUploadProgressInfos();
     this.pushUploadProgressInfos(this.formatMessage('hive.messages.successfullyDeletedTemporaryTable'));
     this.onUploadSuccessfull();
   },
-
   onDeleteTempTableFailure: function (error) {
     console.log("onDeleteTempTableFailure");
     this.setError(error);
     this.setError(this.formatMessage('hive.messages.manuallyDeleteTable',{databaseName:this.get('databaseName'), tableName: this.get("tempTableName")}));
   },
-
+  validateHDFSPath: function (hdfsPath) {
+    if (null == hdfsPath || hdfsPath == "") throw new Error(this.translate('hive.errors.emptyHdfsPath'));
+    var hdfsRegex = new RegExp(this.get("HDFS_PATH_REGEX"), "g");
+    var mArr = hdfsPath.match(hdfsRegex);
+    if (mArr == null || mArr.length != 1) throw new Error(this.translate('hive.errors.illegalHdfPath', {"hdfsPath": hdfsPath} ));
+  },
   createTableAndUploadFile: function () {
     var self = this;
     self.setError();
@@ -664,14 +805,15 @@ export default Ember.Controller.extend({
           self.onDeleteTempTableFailure(error);
         }
         throw error;
-      }).catch(function(error){
+      })
+     .catch(function(error){
         console.log("inside catch : ", error);
-      }).finally(function(){
+      })
+      .finally(function(){
         console.log("finally hide the modal always");
         self.hideUploadModal();
       });
   },
-
   validateInput: function (headers,tableName,databaseName,isFirstRowHeader) {
     // throw exception if invalid.
     if(!headers || headers.length == 0) throw new Error(this.translate('hive.errors.emptyHeaders'));
@@ -695,41 +837,52 @@ export default Ember.Controller.extend({
       throw new Error(this.translate('hive.errors.emptyIsFirstRow', {isFirstRowHeaderField:this.translate('hive.ui.isFirstRowHeader')}));
     }
   },
-
   setError: function (error) {
     if(error){
-      console.log("upload table error : ", error);
+      console.log(" error : ", error);
       this.set('error', JSON.stringify(error));
       this.get('notifyService').error(error);
     }else{
       this.set("error");
     }
   },
-
   previewError: function (error) {
     this.setError(error);
   },
-
   uploadTableFromHdfs : function(){
     console.log("uploadTableFromHdfs called.");
     if(!(this.get("inputFileTypeCSV") == true && this.get("isFirstRowHeader") == false) ){
       this.pushUploadProgressInfos(this.formatMessage('uploadingFromHdfs'));
     }
-    return  this.get('uploader').uploadFromHDFS({
-        "isFirstRowHeader": this.get("isFirstRowHeader"),
-        "databaseName" :  this.get('databaseName'),
-        "tableName" : this.get("tempTableName"),
-        "inputFileType" : this.get("inputFileType").id,
-        "hdfsPath" : this.get("hdfsPath")
-      });
+    var csvParams = this.getCSVParams();
+
+    return this.get('uploader').uploadFromHDFS({
+      "isFirstRowHeader": this.get("isFirstRowHeader"),
+      "databaseName": this.get('databaseName'),
+      "tableName": this.get("tempTableName"),
+      "inputFileType": this.get("inputFileType").id,
+      "hdfsPath": this.get("hdfsPath"),
+      "header": this.get("header"),
+      "containsEndlines": this.get("containsEndlines"),
+      "csvDelimiter": csvParams.csvDelimiter,
+      "csvEscape": csvParams.csvEscape,
+      "csvQuote": csvParams.csvQuote
+    });
   },
   uploadTable: function () {
     this.printValues();
+    var csvParams = this.getCSVParams();
+
     return this.get('uploader').uploadFiles('upload', this.get('files'), {
       "isFirstRowHeader": this.get("isFirstRowHeader"),
       "databaseName" :  this.get('databaseName'),
       "tableName" : this.get("tempTableName"),
-      "inputFileType" : this.get("inputFileType").id
+      "inputFileType" : this.get("inputFileType").id,
+      "header": JSON.stringify(this.get("header")),
+      "containsEndlines": this.get("containsEndlines"),
+      "csvDelimiter": csvParams.csvDelimiter,
+      "csvEscape": csvParams.csvEscape ,
+      "csvQuote": csvParams.csvQuote
     });
   },
 
@@ -755,6 +908,22 @@ export default Ember.Controller.extend({
   },
   displayOption: "display:none",
   actions: {
+    hideInputParamModal : function(){
+      Ember.$("#inputParamsModal").modal("hide");
+    },
+    showInputParamModal : function(){
+      if(this.get('inputFileTypeCSV')){
+        Ember.$("#inputParamsModal").modal("show");
+      }
+    },
+    hideRowFormatModal : function(){
+      Ember.$("#rowFormatModal").modal("hide");
+    },
+    showRowFormatModal : function(){
+      if(this.get('storedAsTextFile')) {
+        Ember.$("#rowFormatModal").modal("show");
+      }
+    },
     toggleErrors: function () {
       this.toggleProperty('showErrors');
     },
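
The controller above now resolves csvDelimiter, csvQuote and csvEscape from the selected ASCII codes (falling back to ',', '"' and '\\') and sends them with every preview and upload call. A hedged sketch of how a server-side handler might map those single-character parameters onto opencsv; orDefault and buildParser are hypothetical helpers, not code from this patch:

    import com.opencsv.CSVParser;

    public class CsvParamSketch {
      // hypothetical helper: fall back to the CSV default when a param is absent
      static char orDefault(String param, char def) {
        return (param == null || param.isEmpty()) ? def : param.charAt(0);
      }

      // hypothetical helper: build an opencsv parser from the request parameters
      static CSVParser buildParser(String delimiter, String quote, String escape) {
        return new CSVParser(orDefault(delimiter, ','),
                             orDefault(quote, '"'),
                             orDefault(escape, '\\'));
      }
    }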

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js
index a00f0b4..f7f7706 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js
@@ -248,11 +248,13 @@ TRANSLATIONS = {
       'no.query': "No query to process.",
       'emptyDatabase' : "Please select {{ database }}.",
       'emptyTableName' : "Please enter {{ tableNameField }}.",
-      'illegalTableName':"Illegal {{ tableNameField }} : '{{ tableName }}'",
+      'illegalTableName': "Illegal {{ tableNameField }} : '{{ tableName }}'",
       'emptyIsFirstRow' : "{{isFirstRowHeaderField}} cannot be null.",
-      'emptyHeaders':"Headers (containing column names) cannot be null.",
-      'emptyColumnName':"Column name cannot be null.",
-      'illegalColumnName':"Illegal column name : '{{columnName}}' in column number {{index}}",
+      'emptyHeaders': "Headers (containing column names) cannot be null.",
+      'emptyColumnName': "Column name cannot be null.",
+      'illegalColumnName': "Illegal column name : '{{columnName}}' in column number {{index}}",
+      'emptyHdfsPath': "HDFS path cannot be null or empty.",
+      'illegalHdfPath': "Illegal HDFS path : {{hdfsPath}}"
     },
     messages : {
       'generatingPreview':"Generating Preview.",
@@ -286,7 +288,7 @@ TRANSLATIONS = {
     words :{
       temporary : "Temporary",
       actual : "Actual",
-      database : "Database",
+      database : "Database"
     },
     ui : {
       'uploadProgress' : "Upload Progress",
@@ -306,8 +308,19 @@ TRANSLATIONS = {
       'columnNameErrorMessage':"Only alphanumeric and underscore characters are allowed in column names.",
       'hdfsFieldTooltip':"Enter full HDFS path",
       'hdfsFieldPlaceholder':"Enter full HDFS path",
-      'hdfsFieldErrorMessage':"Please enter complete path of hdfs file to upload."
-
+      'hdfsFieldErrorMessage':"Please enter complete path of hdfs file to upload.",
+      'containsEndlines': "Contains endlines?",
+      'columnDelimiterTooltip': "Delimiter for the column values. Default is comma (,).",
+      'escapeCharacterTooltip': "Escape character. Default is backslash (\\).",
+      'quoteCharacterTooltip': 'Quote character. Default is double quote (").',
+      'quoteCharacterField': "Quote Character",
+      'escapeCharacterField': "Escape Character",
+      'columnDelimterField': "Field Delimiter",
+      'fieldsTerminatedByField': "Fields Terminated By",
+      'escapedByField': "Escaped By",
+      'escapedByTooltip': "Escaped By character for Hive table.",
+      'fieldsTerminatedByTooltip': "Fields Terminated By character for Hive table.",
+      'isFirstRowHeaderTooltip': "Check if the first row of CSV is a header."
     }
   },
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/main/resources/ui/hive-web/app/styles/app.scss
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/styles/app.scss b/contrib/views/hive/src/main/resources/ui/hive-web/app/styles/app.scss
index aeddaf6..7598b0e 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/styles/app.scss
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/styles/app.scss
@@ -703,4 +703,14 @@ pre.explainprint{
  line-height: 0.5;
  padding: 12px 15px;
 }
+.settings-gear {
+  padding:5px;
+  cursor: pointer;
+}
 
+.settings-gear-disabled {
+  @extend .settings-gear;
+
+  color:#ddd;
+  cursor: default;
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/upload-table.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/upload-table.hbs b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/upload-table.hbs
index 5e3a519..d84dc4d 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/upload-table.hbs
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/upload-table.hbs
@@ -44,8 +44,105 @@
   </div>
 </div>
 
+<!-- CSV Input Modal -->
+<div class="modal fade" id="inputParamsModal" role="dialog">
+  <div class="modal-dialog">
+    <div class="modal-content">
+      <div class="modal-header">
+        <button type="button" class="close" data-dismiss="modal">&times;</button>
+      </div>
+      <div class="modal-body">
+        <table class="table data-upload-form">
+          <tr>
+            <td><label>{{t 'hive.ui.columnDelimterField'}}:</label></td>
+            <td class="data-upload-form-field" title={{t 'hive.ui.columnDelimiterTooltip'}}>
+              {{typeahead-widget
+              content=asciiList
+              optionValuePath="id"
+              optionLabelPath="name"
+              selection=csvDelimiter
+              }}
+            </td>
+          </tr>
+          <tr>
+            <td><label>{{t 'hive.ui.escapeCharacterField'}}:</label></td>
+            <td class="data-upload-form-field" title={{t 'hive.ui.escapeCharacterTooltip'}}>
+              {{typeahead-widget
+              content=asciiList
+              optionValuePath="id"
+              optionLabelPath="name"
+              selection=csvEscape
+              }}
+            </td>
+          </tr>
+          <tr>
+            <td><label>{{t 'hive.ui.quoteCharacterField'}}:</label></td>
+            <td class="data-upload-form-field" title={{t 'hive.ui.quoteCharacterTooltip'}}>
+              {{typeahead-widget
+              content=asciiList
+              optionValuePath="id"
+              optionLabelPath="name"
+              selection=csvQuote
+              }}
+            </td>
+          </tr>
+          <tr>
+            <td><label>{{t "hive.ui.isFirstRowHeader"}}</label></td>
+            <td class="data-upload-form-field"  title={{t 'hive.ui.isFirstRowHeaderTooltip'}}>
+              {{input id="isFirstRowHeader" type="checkbox" checked=isFirstRowHeader }}
+            </td>
+          </tr>
+        </table>
+      </div>
+      <div class="modal-footer">
+        <button type="submit" class="btn btn-default btn-default pull-right" data-dismiss="modal"> Close</button>
+      </div>
+    </div>
+  </div>
+</div>
+
+<!-- Row Format Modal -->
+<div class="modal fade" id="rowFormatModal" role="dialog">
+  <div class="modal-dialog">
+    <div class="modal-content">
+      <div class="modal-header">
+        <button type="button" class="close" data-dismiss="modal">&times;</button>
+      </div>
+      <div class="modal-body">
+        <table class="table data-upload-form">
+          <tr>
+            <td><label>{{t 'hive.ui.fieldsTerminatedByField'}}:</label></td>
+            <td class="data-upload-form-field" title={{t 'hive.ui.fieldsTerminatedByTooltip'}}>
+              {{typeahead-widget
+              content=asciiList
+              optionValuePath="id"
+              optionLabelPath="name"
+              selection=fieldsTerminatedBy
+              }}
+            </td>
+          </tr>
+          <tr>
+            <td><label>{{t 'hive.ui.escapedByField'}}:</label></td>
+            <td class="data-upload-form-field" title={{t 'hive.ui.escapedByTooltip'}}>
+              {{typeahead-widget
+              content=asciiList
+              optionValuePath="id"
+              optionLabelPath="name"
+              selection=escapedBy
+              }}
+            </td>
+          </tr>
+        </table>
+      </div>
+      <div class="modal-footer">
+        <button type="submit" class="btn btn-default btn-default pull-right" data-dismiss="modal"> Close</button>
+      </div>
+    </div>
+  </div>
+</div>
+
 <div class="pull-right">
-  <i class="query-menu-tab fa queries-icon fa-envelope" {{ action 'toggleErrors'}}></i>
+  <i class="query-menu-tab fa queries-icon fa-envelope" {{ action 'toggleErrors'}} ></i>
 </div>
 <div {{bind-attr class="showErrors:hide-data:show-data"}}>
   <div>
@@ -60,15 +157,20 @@
       <tr>
         <td class="data-upload-form-label"><label>{{t "hive.ui.fileType"}}</label></td>
         <td class="data-upload-form-field">
-          {{typeahead-widget
-          content=inputFileTypes
-          optionValuePath="id"
-          optionLabelPath="name"
-          selection=inputFileType
-          placeholder=(t "hive.ui.uploadFromHdfs")
-          }}
+          <div class="col-md-11" style="padding:0">
+            {{typeahead-widget
+            content=inputFileTypes
+            optionValuePath="id"
+            optionLabelPath="name"
+            selection=inputFileType
+            placeholder="Select File Type"}}
+          </div>
+          <div class="col-md-1" style="padding: 0">
+            <span {{bind-attr class=":queries-icon :fa :fa-gear inputFileTypeCSV:settings-gear:settings-gear-disabled"}}
+                  title="Settings" {{action "showInputParamModal"}}>
+            </span>
+          </div>
         </td>
-
         {{#if isLocalUpload }}
           <td class="data-upload-form-label"><label>{{t "hive.ui.selectFromLocal"}}</label></td>
           <td class="data-upload-form-field">{{file-upload  filesUploaded="filesUploaded"  uploadFiles=files}}</td>
@@ -85,43 +187,48 @@
         {{/if}}
       </tr>
       {{#if showPreview}}
-        <tr>
-          <td class="data-upload-form-label"><label>{{t "hive.words.database"}}</label></td>
-          <td class="data-upload-form-field">
-            {{typeahead-widget
-            content=databases
-            optionValuePath="id"
-            optionLabelPath="name"
-            selection=selectedDatabase
-            placeholder=(t "hive.ui.selectDatabase")
-            }}
-          </td>
+      <tr>
+        <td class="data-upload-form-label"><label>{{t "hive.words.database"}}</label></td>
+        <td class="data-upload-form-field">
+          {{typeahead-widget
+          content=databases
+          optionValuePath="id"
+          optionLabelPath="name"
+          selection=selectedDatabase
+          placeholder=(t "hive.ui.selectDatabase")
+          }}
+        </td>
 
-          <td class="data-upload-form-label"><label>{{t "hive.ui.tableName"}}</label></td>
-          <td
-            class="data-upload-form-field">
-            {{#validated-text-field inputValue=tableName allowEmpty=false
-            tooltip=(t "hive.ui.tableNameTooltip")
-            invalidClass='form-control red-border' validClass='form-control' regex=TABLE_NAME_REGEX
-            errorMessage=(t "hive.ui.tableNameErrorMessage") }}
-            {{/validated-text-field}}
-          </td>
-        </tr>
-        <tr>
-          <td class="data-upload-form-label"><label>{{t "hive.ui.storedAs"}}</label></td>
+        <td class="data-upload-form-label"><label>{{t "hive.ui.tableName"}}</label></td>
+        <td class="data-upload-form-field">
+          {{#validated-text-field inputValue=tableName allowEmpty=false
+          tooltip=(t "hive.ui.tableNameTooltip")
+          invalidClass='form-control red-border' validClass='form-control' regex=TABLE_NAME_REGEX
+          errorMessage=(t "hive.ui.tableNameErrorMessage") }}
+          {{/validated-text-field}}
+        </td>
+      </tr>
+      <tr>
+        <td class="data-upload-form-label"><label>{{t "hive.ui.storedAs"}}</label></td>
+        <td class="data-upload-form-field">
+          <div class="col-md-11" style="padding: 0">
+              {{typeahead-widget
+              content=fileTypes
+              selection=selectedFileType}}
+          </div>
+            <div class="col-md-1" style="padding: 0">
+              <span {{bind-attr class=":queries-icon :fa :fa-gear storedAsTextFile:settings-gear:settings-gear-disabled"}}
+                    title="Settings" {{action "showRowFormatModal"}}>
+              </span>
+            </div>
+        </td>
+        {{#if storedAsNotTextFile}}
+          <td class="data-upload-form-label"><label>{{t "hive.ui.containsEndlines"}}</label></td>
           <td class="data-upload-form-field">
-            {{typeahead-widget
-            content=fileTypes
-            selection=selectedFileType}}
+            {{input type="checkbox" checked=containsEndlines }}
           </td>
-          {{#if inputFileTypeCSV }}
-            <td class="data-upload-form-label"><label>{{t "hive.ui.isFirstRowHeader"}}</label></td>
-            <td class="data-upload-form-field">
-              {{input id="isFirstRowHeader" type="checkbox" checked=isFirstRowHeader }}
-            </td>
-
-          {{/if}}
-        </tr>
+        {{/if}}
+      </tr>
       {{/if}}
     </table>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/CSVParserTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/CSVParserTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/CSVParserTest.java
index c548d23..d278fde 100644
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/CSVParserTest.java
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/CSVParserTest.java
@@ -18,11 +18,9 @@
 
 package org.apache.ambari.view.hive.resources.upload;
 
-import com.google.gson.JsonArray;
-import com.google.gson.JsonObject;
 import org.apache.ambari.view.hive.client.Row;
-import org.apache.ambari.view.hive.resources.uploads.parsers.csv.CSVParser;
-import org.apache.ambari.view.hive.resources.uploads.parsers.json.JSONParser;
+import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive.resources.uploads.parsers.csv.commonscsv.CSVParser;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -42,7 +40,7 @@ public class CSVParserTest {
 
     try(
       StringReader sr = new StringReader(csv);
-      CSVParser jp = new CSVParser(sr, null);
+      CSVParser jp = new CSVParser(sr, new ParseOptions());
       ) {
       Assert.assertEquals("There should not be any rows.",false, jp.iterator().hasNext());
     }
@@ -58,7 +56,7 @@ public class CSVParserTest {
 
     try(
       StringReader sr = new StringReader(csv);
-      CSVParser jp = new CSVParser(sr, null);
+      CSVParser jp = new CSVParser(sr, new ParseOptions());
       ) {
       Iterator<Row> iterator = jp.iterator();
 
@@ -73,7 +71,7 @@ public class CSVParserTest {
 
     try(
       StringReader sr = new StringReader(csv);
-      CSVParser jp = new CSVParser(sr, null);
+      CSVParser jp = new CSVParser(sr, new ParseOptions());
       ) {
       Iterator<Row> iterator = jp.iterator();
 
@@ -94,7 +92,7 @@ public class CSVParserTest {
 
     try(
       StringReader sr = new StringReader(csv);
-      CSVParser jp = new CSVParser(sr, null);
+      CSVParser jp = new CSVParser(sr, new ParseOptions());
     ) {
 
       Iterator<Row> iterator = jp.iterator();
@@ -109,4 +107,169 @@ public class CSVParserTest {
       Assert.assertEquals("Failed to detect end of rows 2nd time!", false, iterator.hasNext());
     }
   }
+
+
+  @Test
+  public void testQuotedEndline() throws Exception {
+
+    String csv = "\"row1-\ncol1\",1,1.1\n\"row2-\\\ncol1\",2,2.2\n";
+    ParseOptions po = new ParseOptions();
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Row row = new Row(new Object[]{"row1-\ncol1", "1", "1.1"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+
+      Row row2 = new Row(new Object[]{"row2-\\\ncol1", "2", "2.2"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row2, iterator.next());
+
+    }
+  }
+
+  @Test
+  public void testQuotedDoubleQuote() throws Exception {
+
+    String csv = "\"aaa\",\"b\"\"bb\",\"ccc\"";
+    ParseOptions po = new ParseOptions();
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Row row = new Row(new Object[]{"aaa", "b\"bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
+
+  @Test
+  public void testSpecialEscape() throws Exception {
+
+    String csv = "\"aaa\",\"b$\"bb\",\"ccc\"";
+    ParseOptions po = new ParseOptions();
+    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR,'$');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Row row = new Row(new Object[]{"aaa", "b\"bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
+
+  @Test
+  public void testSpecialEscapedEscape() throws Exception {
+
+    String csv = "aaa,b$$bb,ccc";
+    ParseOptions po = new ParseOptions();
+    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR,'$');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Row row = new Row(new Object[]{"aaa", "b$bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
+
+  @Test
+  public void test001Escape() throws Exception {
+
+    String csv = "aaa,b\001\"bb,ccc";
+    ParseOptions po = new ParseOptions();
+    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR,'\001');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+      Row row = new Row(new Object[]{"aaa", "b\"bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());    }
+  }
+
+  @Test
+  public void testSpecialQuote() throws Exception {
+
+    String csv = "\001aaa\001,\001b\001\001bb\001,\001ccc\001";
+    ParseOptions po = new ParseOptions();
+    po.setOption(ParseOptions.OPTIONS_CSV_QUOTE,'\001');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+      Row row = new Row(new Object[]{"aaa", "b\001bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
+
+  @Test
+  public void testSpaceAsDelimiterAndQuoted() throws Exception {
+
+    String csv = "aaa \"b bb\" ccc\naaa2 bbb2 \"c cc2\"";
+    ParseOptions po = new ParseOptions();
+//    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR,'\001');
+    po.setOption(ParseOptions.OPTIONS_CSV_DELIMITER,' ');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+      Row row = new Row(new Object[]{"aaa", "b bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+
+      Row row2 = new Row(new Object[]{"aaa2", "bbb2", "c cc2"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row2, iterator.next());
+    }
+  }
+
+  @Test
+  public void testFailedDelimiterEscaped() throws Exception {
+
+    String csv = "aaa,b\\,bb,ccc";
+    ParseOptions po = new ParseOptions();
+    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR,'\\');
+    po.setOption(ParseOptions.OPTIONS_CSV_DELIMITER,',');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+      Row row = new Row(new Object[]{"aaa", "b,bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
 }
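
The options exercised by the tests above map directly onto the commons-csv format builder. A minimal sketch of the equivalent raw commons-csv configuration (standard commons-csv API; the delimiter and input come from testSpaceAsDelimiterAndQuoted, the escape from testSpecialEscape):

    import org.apache.commons.csv.CSVFormat;
    import org.apache.commons.csv.CSVParser;
    import org.apache.commons.csv.CSVRecord;

    import java.io.StringReader;

    public class CommonsCsvSketch {
      public static void main(String[] args) throws Exception {
        // delimiter, quote and escape mirror OPTIONS_CSV_DELIMITER/_QUOTE/_ESCAPE_CHAR
        CSVFormat format = CSVFormat.DEFAULT
            .withDelimiter(' ')
            .withQuote('"')
            .withEscape('$');
        try (CSVParser parser = format.parse(new StringReader("aaa \"b bb\" ccc"))) {
          for (CSVRecord record : parser) {
            System.out.println(record.get(1));   // prints: b bb
          }
        }
      }
    }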

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserCSVTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserCSVTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserCSVTest.java
index a367375..7362c89 100644
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserCSVTest.java
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserCSVTest.java
@@ -18,10 +18,6 @@
 
 package org.apache.ambari.view.hive.resources.upload;
 
-import com.google.gson.JsonObject;
-import com.google.gson.stream.JsonReader;
-import com.google.gson.stream.JsonToken;
-import com.google.gson.stream.JsonWriter;
 import org.apache.ambari.view.hive.client.ColumnDescription;
 import org.apache.ambari.view.hive.client.ColumnDescriptionShort;
 import org.apache.ambari.view.hive.client.Row;
@@ -33,7 +29,6 @@ import org.junit.Assert;
 import org.junit.Test;
 
 import java.io.IOException;
-import java.io.PrintWriter;
 import java.io.StringReader;
 
 public class DataParserCSVTest {
@@ -189,8 +184,8 @@ public class DataParserCSVTest {
       Assert.assertNotNull(pd.getHeader());
       Assert.assertEquals(1, pd.getPreviewRows().size());
       Assert.assertEquals(2, pd.getHeader().size());
-      ColumnDescription[] cd = {new ColumnDescriptionImpl("Column1", ColumnDescriptionShort.DataTypes.INT.toString(), 0),
-        new ColumnDescriptionImpl("Column2", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1)};
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("column1", ColumnDescriptionShort.DataTypes.INT.toString(), 0),
+        new ColumnDescriptionImpl("column2", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1)};
 
       Object cols1[] = new Object[2];
       cols1[0] = "1";

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserJSONTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserJSONTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserJSONTest.java
index 6b2f6a33..2ee92df 100644
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserJSONTest.java
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserJSONTest.java
@@ -246,8 +246,8 @@ public class DataParserJSONTest {
       Assert.assertNotNull(pd.getHeader());
       Assert.assertEquals(1, pd.getPreviewRows().size());
       Assert.assertEquals(2, pd.getHeader().size());
-      ColumnDescription[] cd = {new ColumnDescriptionImpl("Column1", ColumnDescriptionShort.DataTypes.CHAR.toString(), 0),
-        new ColumnDescriptionImpl("Column2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1)};
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("column1", ColumnDescriptionShort.DataTypes.CHAR.toString(), 0),
+        new ColumnDescriptionImpl("column2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1)};
 
       Object cols1[] = new Object[2];
       cols1[0] = "d";

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserXMLTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserXMLTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserXMLTest.java
index e5fddc7..25be565 100644
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserXMLTest.java
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/DataParserXMLTest.java
@@ -278,8 +278,8 @@ public class DataParserXMLTest {
       Assert.assertNotNull(pd.getHeader());
       Assert.assertEquals(1, pd.getPreviewRows().size());
       Assert.assertEquals(2, pd.getHeader().size());
-      ColumnDescription[] cd = {new ColumnDescriptionImpl("Column1", ColumnDescriptionShort.DataTypes.STRING.toString(), 0),
-        new ColumnDescriptionImpl("Column2", ColumnDescriptionShort.DataTypes.INT.toString(), 1)};
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("column1", ColumnDescriptionShort.DataTypes.STRING.toString(), 0),
+        new ColumnDescriptionImpl("column2", ColumnDescriptionShort.DataTypes.INT.toString(), 1)};
 
       Object cols1[] = new Object[2];
       cols1[0] = "row1-col1-Value";

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/OpenCSVParserTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/OpenCSVParserTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/OpenCSVParserTest.java
new file mode 100644
index 0000000..2c890f5
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/OpenCSVParserTest.java
@@ -0,0 +1,313 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.upload;
+
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive.resources.uploads.parsers.csv.opencsv.OpenCSVParser;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.Iterator;
+
+public class OpenCSVParserTest {
+
+  /**
+   * no exception should occur when creating the CSV parser with an empty stream
+   * @throws IOException
+   */
+  @Test
+  public void testEmptyStream() throws Exception {
+    String csv = "";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, new ParseOptions());
+      ) {
+      Assert.assertEquals("There should not be any rows.",false, jp.iterator().hasNext());
+    }
+  }
+
+  /**
+   * in CSV, an empty line is still considered a row
+   * @throws IOException
+   */
+  @Test
+  public void testEmptyRow() throws Exception {
+    String csv = "       ";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, new ParseOptions());
+      ) {
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator should be Empty", true, iterator.hasNext());
+      Assert.assertArrayEquals("Row should not be empty",new Object[]{"       "},iterator.next().getRow());
+    }
+  }
+
+  @Test
+  public void testParse1Row() throws Exception {
+    String csv = "value1,c,10,10.1";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, new ParseOptions());
+      ) {
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator Empty!", true, iterator.hasNext());
+      Row row = iterator.next();
+      Row expected = new Row(new Object[]{"value1", "c", "10", "10.1"});
+      Assert.assertEquals("Row not equal!", expected, row);
+
+      Assert.assertEquals("Should report no more rows!", false, iterator.hasNext());
+    }
+  }
+
+  @Test
+  public void testParseMultipleRow() throws Exception {
+
+    String csv = "value1,c,10,10.1\n" +
+            "value2,c2,102,true";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, new ParseOptions());
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", new Row(new Object[]{"value1", "c", "10", "10.1"}), iterator.next());
+
+      Assert.assertEquals("Failed to detect 2nd row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 2nd row!", new Row(new Object[]{"value2", "c2", "102", Boolean.TRUE.toString()}), iterator.next());
+
+      Assert.assertEquals("Failed to detect end of rows!", false, iterator.hasNext());
+      Assert.assertEquals("Failed to detect end of rows 2nd time!", false, iterator.hasNext());
+    }
+  }
+
+  @Test
+  public void testQuotedAndEscapedEndline() throws Exception {
+
+    String csv = "\"row1-\ncol1\",1,1.1\n\"row2-\\\ncol1\",2,2.2\n";
+    ParseOptions po = new ParseOptions();
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Row row = new Row(new Object[]{"row1-\ncol1", "1", "1.1"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+
+      Row row2 = new Row(new Object[]{"row2-\ncol1", "2", "2.2"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row2, iterator.next());
+
+    }
+  }
+
+  @Test
+  public void testQuotedDoubleQuote() throws Exception {
+
+    String csv = "\"aaa\",\"b\"\"bb\",\"ccc\"";
+    ParseOptions po = new ParseOptions();
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Row row = new Row(new Object[]{"aaa", "b\"bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
+
+  @Test
+  public void testEscapedDoubleQuote() throws Exception {
+
+    String csv = "\"aaa\",\"b\\\"bb\",\"ccc\"";
+    ParseOptions po = new ParseOptions();
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Row row = new Row(new Object[]{"aaa", "b\"bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
+
+
+  @Test
+  public void testSpecialEscape() throws Exception {
+
+    String csv = "\"aaa\",\"b$\"bb\",\"ccc\"";
+    ParseOptions po = new ParseOptions();
+    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR,'$');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Row row = new Row(new Object[]{"aaa", "b\"bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
+
+  @Test
+  public void testSpecialEscapedEscape() throws Exception {
+
+    String csv = "aaa,b$$bb,ccc";
+    ParseOptions po = new ParseOptions();
+    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR,'$');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Row row = new Row(new Object[]{"aaa", "b$bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
+
+
+  @Test
+  public void testSpecialUnEscapedEscape() throws Exception {
+
+    String csv = "aaa,b$bb,ccc";
+    ParseOptions po = new ParseOptions();
+    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR,'$');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Row row = new Row(new Object[]{"aaa", "bbb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
+
+  @Test
+  public void test001Escape() throws Exception {
+
+    String csv = "aaa,b\001\"bb,ccc";
+    ParseOptions po = new ParseOptions();
+    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR,'\001');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+      Row row = new Row(new Object[]{"aaa", "b\"bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());    }
+  }
+
+  @Test
+  public void testSpecialQuote() throws Exception {
+
+    String csv = "\001aaa\001,\001b\001\001bb\001,\001ccc\001";
+    ParseOptions po = new ParseOptions();
+    po.setOption(ParseOptions.OPTIONS_CSV_QUOTE,'\001');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+      Row row = new Row(new Object[]{"aaa", "b\001bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
+
+  @Test
+  public void testSpaceAsDelimiterAndQuoted() throws Exception {
+
+    String csv = "aaa \"b bb\" ccc\naaa2 bbb2 \"c cc2\"";
+    ParseOptions po = new ParseOptions();
+//    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR,'\001');
+    po.setOption(ParseOptions.OPTIONS_CSV_DELIMITER,' ');
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+      Row row = new Row(new Object[]{"aaa", "b bb", "ccc"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+
+      Row row2 = new Row(new Object[]{"aaa2", "bbb2", "c cc2"});
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row2, iterator.next());
+    }
+  }
+
+  @Test
+  public void testFailedDelimiterEscaped() throws Exception {
+
+    String csv = "aaa,b\\,bb,ccc";
+    ParseOptions po = new ParseOptions();
+
+    try(
+      StringReader sr = new StringReader(csv);
+      OpenCSVParser jp = new OpenCSVParser(sr, po);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+      Row row = new Row(new Object[]{"aaa", "b","bb", "ccc"});   // different from Common CSVParser
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
+    }
+  }
+}
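
The tests above drive OpenCSVParser entirely through ParseOptions. For
orientation, here is a minimal sketch, not the committed OpenCSVParser (the
class and helper names below are illustrative), of how those options can be
mapped onto opencsv's CSVReader, with fallbacks mirroring the usual CSV
defaults:

import com.opencsv.CSVReader;
import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;

import java.io.Reader;

class OpenCsvParserSketch {
  private final CSVReader csvReader;

  OpenCsvParserSketch(Reader reader, ParseOptions opts) {
    // fall back to the standard CSV defaults when an option was not supplied
    char delimiter = charOption(opts, ParseOptions.OPTIONS_CSV_DELIMITER, ',');
    char quote = charOption(opts, ParseOptions.OPTIONS_CSV_QUOTE, '"');
    char escape = charOption(opts, ParseOptions.OPTIONS_CSV_ESCAPE_CHAR, '\\');
    this.csvReader = new CSVReader(reader, delimiter, quote, escape);
  }

  private static char charOption(ParseOptions opts, String key, char defaultChar) {
    Object value = opts.getOption(key);              // options are stored as Objects
    return value == null ? defaultChar : (Character) value;
  }
}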

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/OpenCSVTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/OpenCSVTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/OpenCSVTest.java
new file mode 100644
index 0000000..be5733f
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/OpenCSVTest.java
@@ -0,0 +1,248 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.upload;
+
+import com.opencsv.CSVParser;
+import com.opencsv.CSVReader;
+import com.opencsv.CSVWriter;
+import org.apache.ambari.view.hive.client.Row;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.io.StringWriter;
+import java.util.Arrays;
+import java.util.Iterator;
+
+public class OpenCSVTest {
+
+  /**
+   * No exception when creating the CSV parser from an empty stream.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testEmptyStream() throws Exception {
+    String csv = "";
+
+    CSVParser jp = new CSVParser();
+    String[] columns = jp.parseLine(csv);
+    Assert.assertEquals("Should detect one column.", 1, columns.length);
+    Assert.assertEquals("Should detect one column with empty value.", new String[]{""}, columns);
+  }
+
+  /**
+   * In CSV, an empty line is still considered a row.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testEmptyRow() throws Exception {
+    String csv = "       ";
+    CSVParser jp = new CSVParser();
+
+    String[] columns = jp.parseLine(csv);
+    Assert.assertEquals("One column not detected.", 1, columns.length);
+    Assert.assertArrayEquals("Row should not be empty", new String[]{"       "}, columns);
+  }
+
+  @Test
+  public void testParse1Row() throws Exception {
+    String csv = "value1,c,10,10.1";
+
+    String[] cols = csv.split(",");
+    CSVParser jp = new CSVParser();
+    String[] columns = jp.parseLine(csv);
+    Assert.assertEquals("4 columns not detect", 4, columns.length);
+    Assert.assertArrayEquals("Row not equal!", cols, columns);
+  }
+
+  @Test
+  public void testParseMultipleRow() throws Exception {
+
+    String csv = "value1,c,10,10.1\n" +
+      "value2,c2,102,true";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVReader csvReader = new CSVReader(sr,',','"','\\');
+    ) {
+      String[] row1 = csvReader.readNext();
+      String[] row2 = csvReader.readNext();
+
+      Assert.assertArrayEquals("Failed to match 1st row!",new String[]{"value1", "c", "10", "10.1"}, row1);
+
+      Assert.assertArrayEquals("Failed to match 2nd row!",new String[]{"value2", "c2", "102", "true"}, row2);
+    }
+  }
+
+  @Test
+  public void testParseCustomSeparator() throws Exception {
+
+    String csv = "value1#c#10#10.1\n" +
+      "value2#c2#102#true";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVReader csvReader = new CSVReader(sr,'#','"','\\');
+    ) {
+      String[] row1 = csvReader.readNext();
+      String[] row2 = csvReader.readNext();
+
+      Assert.assertArrayEquals("Failed to match 1st row!",new String[]{"value1", "c", "10", "10.1"}, row1);
+
+      Assert.assertArrayEquals("Failed to match 2nd row!",new String[]{"value2", "c2", "102", "true"}, row2);
+    }
+  }
+
+
+  @Test
+  public void testParseCustomSeparatorAndQuote() throws Exception {
+
+    String csv = "\"valu#e1\"#c#10#10.1\n" +
+      "value2#c2#102#true";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVReader csvReader = new CSVReader(sr,'#','"','\\');
+    ) {
+      String[] row1 = csvReader.readNext();
+      String[] row2 = csvReader.readNext();
+
+      Assert.assertArrayEquals("Failed to match 1st row!",new String[]{"valu#e1", "c", "10", "10.1"}, row1);
+
+      Assert.assertArrayEquals("Failed to match 2nd row!",new String[]{"value2", "c2", "102", "true"}, row2);
+    }
+  }
+
+  @Test
+  public void testParseCustomSeparatorAndCustomQuote() throws Exception {
+
+    String csv = "\'valu#e1\'#c#10#10.1\n" +
+      "value2#c2#102#true";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVReader csvReader = new CSVReader(sr,'#','\'','\\');
+    ) {
+      String[] row1 = csvReader.readNext();
+      String[] row2 = csvReader.readNext();
+      String[] row3 = csvReader.readNext();
+
+      Assert.assertArrayEquals("Failed to match 1st row!",new String[]{"valu#e1", "c", "10", "10.1"}, row1);
+
+      Assert.assertArrayEquals("Failed to match 2nd row!",new String[]{"value2", "c2", "102", "true"}, row2);
+
+      Assert.assertArrayEquals("should match Null", null, row3);
+    }
+  }
+
+  @Test
+  public void testWriter() throws Exception {
+
+    String csv = "\'valu#e1\'#c#10#10.1\n" +
+      "value2#c2#102#true";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVReader csvReader = new CSVReader(sr,'#','\'','\\');
+      StringWriter sw = new StringWriter();
+      CSVWriter csvWriter = new CSVWriter(sw);
+    ) {
+      String[] row1 = csvReader.readNext();
+      csvWriter.writeNext(row1);
+      String[] row2 = csvReader.readNext();
+      csvWriter.writeNext(row2);
+
+      Assert.assertEquals("CSVWriter failed.","\"valu#e1\",\"c\",\"10\",\"10.1\"\n" +
+        "\"value2\",\"c2\",\"102\",\"true\"\n", sw.getBuffer().toString());
+    }
+  }
+
+  @Test
+  public void testWriterCustomSeparator() throws Exception {
+
+    String csv = "\'valu#e1\'#c#10#10.1\n" +
+      "value2#c2#102#true";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVReader csvReader = new CSVReader(sr,'#','\'','\\');
+      StringWriter sw = new StringWriter();
+      CSVWriter csvWriter = new CSVWriter(sw,'$');
+    ) {
+      String[] row1 = csvReader.readNext();
+      csvWriter.writeNext(row1);
+      String[] row2 = csvReader.readNext();
+      csvWriter.writeNext(row2);
+
+      Assert.assertEquals("CSVWriter failed.","\"valu#e1\"$\"c\"$\"10\"$\"10.1\"\n" +
+        "\"value2\"$\"c2\"$\"102\"$\"true\"\n", sw.getBuffer().toString());
+    }
+  }
+
+  @Test
+  public void testWriterCustomSeparatorAndEndline() throws Exception {
+
+    String csv = "value1,c,10,10.1\n" +
+      "value2,c2,102,true";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVReader csvReader = new CSVReader(sr,',','\'','\\');
+      StringWriter sw = new StringWriter();
+      CSVWriter csvWriter = new CSVWriter(sw,'\002',',',"\003");
+    ) {
+      String[] row1 = csvReader.readNext();
+      csvWriter.writeNext(row1,false);
+      String[] row2 = csvReader.readNext();
+      csvWriter.writeNext(row2,false);
+
+      Assert.assertEquals("CSVWriter failed.","value1\002c\00210\00210.1\003" +
+        "value2\002c2\002102\002true\003", sw.getBuffer().toString());
+    }
+  }
+
+  @Test
+  public void testWriterQuote() throws Exception {
+
+    String csv = "val#ue1,c,10,10.1\n" +
+      "'val,ue2',c2,102,true\n" +
+      "val\002ue3,c\0033,103,false";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVReader csvReader = new CSVReader(sr,',','\'','\\');
+      StringWriter sw = new StringWriter();
+      CSVWriter csvWriter = new CSVWriter(sw,'\002','\'',"\003");
+    ) {
+      String[] row1 = csvReader.readNext();
+      csvWriter.writeNext(row1,false);
+      String[] row2 = csvReader.readNext();
+      csvWriter.writeNext(row2,false);
+      String[] row3 = csvReader.readNext();
+      csvWriter.writeNext(row3,false);
+
+      Assert.assertEquals("CSVWriter failed.","val#ue1\u0002c\u000210\u000210.1\u0003" +
+        "val,ue2\u0002c2\u0002102\u0002true\u0003" +
+        "'val\u0002ue3'\u0002c\u00033\u0002103\u0002false\u0003", sw.getBuffer().toString());
+    }
+  }
+}
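
A side note on the '\001'/'\002'/'\003' separators exercised above: '\001'
(Ctrl-A) is Hive's default field delimiter, and TableDataReader later in this
commit stages upload data with it. Because such control characters almost
never occur in user data, fields containing commas survive without quoting,
as this small round trip (using only the opencsv API already shown above)
illustrates:

import com.opencsv.CSVReader;
import com.opencsv.CSVWriter;

import java.io.StringReader;
import java.io.StringWriter;
import java.util.Arrays;

public class ControlCharDelimiterDemo {
  public static void main(String[] args) throws Exception {
    StringWriter sw = new StringWriter();
    try (CSVWriter writer = new CSVWriter(sw, '\001')) {
      writer.writeNext(new String[]{"a,b", "c#d"}, false); // commas need no quoting here
    }
    try (CSVReader reader = new CSVReader(new StringReader(sw.toString()), '\001')) {
      System.out.println(Arrays.toString(reader.readNext())); // [a,b, c#d]
    }
  }
}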

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/QueryGeneratorTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/QueryGeneratorTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/QueryGeneratorTest.java
index 5941aaaa..4c4a03a 100644
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/QueryGeneratorTest.java
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/QueryGeneratorTest.java
@@ -24,6 +24,7 @@ import org.apache.ambari.view.hive.resources.uploads.HiveFileType;
 import org.apache.ambari.view.hive.resources.uploads.query.DeleteQueryInput;
 import org.apache.ambari.view.hive.resources.uploads.query.InsertFromQueryInput;
 import org.apache.ambari.view.hive.resources.uploads.query.QueryGenerator;
+import org.apache.ambari.view.hive.resources.uploads.query.RowFormat;
 import org.apache.ambari.view.hive.resources.uploads.query.TableInfo;
 import org.junit.Assert;
 import org.junit.Test;
@@ -42,10 +43,12 @@ public class QueryGeneratorTest {
     cdl.add(new ColumnDescriptionImpl("col4", ColumnDescription.DataTypes.VARCHAR.toString(), 3, 40));
     cdl.add(new ColumnDescriptionImpl("col5", ColumnDescription.DataTypes.INT.toString(), 4));
 
-    TableInfo ti = new TableInfo("databaseName", "tableName", cdl, HiveFileType.TEXTFILE);
+    TableInfo ti = new TableInfo("databaseName", "tableName", cdl, HiveFileType.TEXTFILE, new RowFormat(',', '\\'));
 
     QueryGenerator qg = new QueryGenerator();
-    Assert.assertEquals("Create query for text file not correct ","create table tableName (col1 CHAR(10), col2 STRING, col3 DECIMAL(10,5), col4 VARCHAR(40), col5 INT) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE;",qg.generateCreateQuery(ti));
+    Assert.assertEquals("Create query for text file not correct ","CREATE TABLE tableName (col1 CHAR(10), col2 STRING," +
+      " col3 DECIMAL(10,5), col4 VARCHAR(40), col5 INT) ROW FORMAT DELIMITED FIELDS TERMINATED BY ','" +
+      " ESCAPED BY '\\\\' STORED AS TEXTFILE;",qg.generateCreateQuery(ti));
   }
 
   @Test
@@ -58,19 +61,40 @@ public class QueryGeneratorTest {
     cdl.add(new ColumnDescriptionImpl("col4", ColumnDescription.DataTypes.VARCHAR.toString(), 3, 40));
     cdl.add(new ColumnDescriptionImpl("col5", ColumnDescription.DataTypes.INT.toString(), 4));
 
-    TableInfo ti = new TableInfo("databaseName", "tableName", cdl, HiveFileType.ORC);
+    TableInfo ti = new TableInfo("databaseName", "tableName", cdl, HiveFileType.ORC, new RowFormat(',', '\\'));
 
     QueryGenerator qg = new QueryGenerator();
-    Assert.assertEquals("Create query for text file not correct ","create table tableName (col1 CHAR(10), col2 STRING, col3 DECIMAL(10,5), col4 VARCHAR(40), col5 INT) STORED AS ORC;",qg.generateCreateQuery(ti));
+    Assert.assertEquals("Create query for text file not correct ","CREATE TABLE tableName (col1 CHAR(10), col2 STRING, col3 DECIMAL(10,5), col4 VARCHAR(40), col5 INT) STORED AS ORC;",qg.generateCreateQuery(ti));
   }
 
   @Test
-  public void testInsertFromQuery() {
+  public void testInsertWithoutUnhexFromQuery() {
+    List<ColumnDescriptionImpl> cdl = new ArrayList<>(4);
+    cdl.add(new ColumnDescriptionImpl("col1", ColumnDescription.DataTypes.CHAR.toString(), 0, 10));
+    cdl.add(new ColumnDescriptionImpl("col2", ColumnDescription.DataTypes.STRING.toString(), 1));
+    cdl.add(new ColumnDescriptionImpl("col3", ColumnDescription.DataTypes.DECIMAL.toString(), 2, 10, 5));
+    cdl.add(new ColumnDescriptionImpl("col4", ColumnDescription.DataTypes.VARCHAR.toString(), 3, 40));
+    cdl.add(new ColumnDescriptionImpl("col5", ColumnDescription.DataTypes.INT.toString(), 4));
+
+    InsertFromQueryInput ifqi = new InsertFromQueryInput("fromDB","fromTable","toDB","toTable", cdl, Boolean.FALSE);
+
+    QueryGenerator qg = new QueryGenerator();
+    Assert.assertEquals("insert from one table to another not correct ","INSERT INTO TABLE toDB.toTable SELECT col1, col2, col3, col4, col5 FROM fromDB.fromTable;",qg.generateInsertFromQuery(ifqi));
+  }
+
+  @Test
+  public void testInsertWithUnhexFromQuery() {
+    List<ColumnDescriptionImpl> cdl = new ArrayList<>(4);
+    cdl.add(new ColumnDescriptionImpl("col1", ColumnDescription.DataTypes.CHAR.toString(), 0, 10));
+    cdl.add(new ColumnDescriptionImpl("col2", ColumnDescription.DataTypes.STRING.toString(), 1));
+    cdl.add(new ColumnDescriptionImpl("col3", ColumnDescription.DataTypes.DECIMAL.toString(), 2, 10, 5));
+    cdl.add(new ColumnDescriptionImpl("col4", ColumnDescription.DataTypes.VARCHAR.toString(), 3, 40));
+    cdl.add(new ColumnDescriptionImpl("col5", ColumnDescription.DataTypes.INT.toString(), 4));
 
-    InsertFromQueryInput ifqi = new InsertFromQueryInput("fromDB","fromTable","toDB","toTable");
+    InsertFromQueryInput ifqi = new InsertFromQueryInput("fromDB","fromTable","toDB","toTable", cdl, Boolean.TRUE);
 
     QueryGenerator qg = new QueryGenerator();
-    Assert.assertEquals("insert from one table to another not correct ","insert into table toDB.toTable select * from fromDB.fromTable",qg.generateInsertFromQuery(ifqi));
+    Assert.assertEquals("insert from one table to another not correct ","INSERT INTO TABLE toDB.toTable SELECT UNHEX(col1), UNHEX(col2), col3, UNHEX(col4), col5 FROM fromDB.fromTable;",qg.generateInsertFromQuery(ifqi));
   }
 
   @Test
@@ -79,6 +103,6 @@ public class QueryGeneratorTest {
     DeleteQueryInput deleteQueryInput = new DeleteQueryInput("dbName","tableName");
 
     QueryGenerator qg = new QueryGenerator();
-    Assert.assertEquals("drop table query not correct ","drop table dbName.tableName",qg.generateDropTableQuery(deleteQueryInput ));
+    Assert.assertEquals("drop table query not correct ","DROP TABLE dbName.tableName;",qg.generateDropTableQuery(deleteQueryInput ));
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/TableDataReaderTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/TableDataReaderTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/TableDataReaderTest.java
index d94eace..2e9c2b0 100644
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/TableDataReaderTest.java
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/upload/TableDataReaderTest.java
@@ -18,15 +18,18 @@
 
 package org.apache.ambari.view.hive.resources.upload;
 
+import org.apache.ambari.view.hive.client.ColumnDescription;
 import org.apache.ambari.view.hive.client.Row;
+import org.apache.ambari.view.hive.resources.uploads.ColumnDescriptionImpl;
 import org.apache.ambari.view.hive.resources.uploads.TableDataReader;
 import org.junit.Assert;
 import org.junit.Test;
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
 
 public class TableDataReaderTest {
 
@@ -75,32 +78,31 @@ public class TableDataReaderTest {
   @Test
   public void testCSVReader() throws IOException {
     RowIter rowIter = new RowIter(10,10);
+    List<ColumnDescriptionImpl> colDescs = new LinkedList<>();
+    for(int i = 0 ; i < 10 ; i++ ) {
+      ColumnDescriptionImpl cd = new ColumnDescriptionImpl("col" + (i+1) , ColumnDescription.DataTypes.STRING.toString(), i);
+      colDescs.add(cd);
+    }
 
-    TableDataReader tableDataReader = new TableDataReader(rowIter);
+    TableDataReader tableDataReader = new TableDataReader(rowIter, colDescs, false);
 
-    char[] first10 = "0,1,2,3,4,".toCharArray();
+    char del = TableDataReader.CSV_DELIMITER;
+    char[] first10 = {'0', del, '1', del, '2', del, '3', del, '4', del};
     char [] buf = new char[10];
     tableDataReader.read(buf,0,10);
 
-//    System.out.println("first10 : " + Arrays.toString(first10));
-//    System.out.println("buf : " + Arrays.toString(buf));
     Assert.assertArrayEquals(first10,buf);
 
-
-    char[] next11 = "5,6,7,8,9\n1".toCharArray();
+    char[] next11 = {'5', del, '6', del, '7', del, '8', del, '9', '\n', '1'};
     char [] buf1 = new char[11];
     tableDataReader.read(buf1,0,11);
 
-//    System.out.println("next11 : " + Arrays.toString(next11));
-//    System.out.println("buf1 : " + Arrays.toString(buf1));
     Assert.assertArrayEquals(next11,buf1);
 
     // read it fully
     while( tableDataReader.read(buf,0,10) != -1 );
 
-    char [] last10 = "97,98,99\n,".toCharArray(); // last comma is the left over of previous read.
-//    System.out.println("last10 : " + Arrays.toString(last10));
-//    System.out.println("buf : " + Arrays.toString(buf));
+    char [] last10 = {'9', '7', del, '9', '8', del, '9', '9', '\n', del}; // trailing delimiter is the leftover from the previous read.
 
     Assert.assertArrayEquals(last10,buf);
   }
@@ -109,7 +111,7 @@ public class TableDataReaderTest {
   public void testEmptyCSVReader() throws IOException {
     RowIter rowIter = new RowIter(0,0);
 
-    TableDataReader tableDataReader = new TableDataReader(rowIter);
+    TableDataReader tableDataReader = new TableDataReader(rowIter, null, false);
 
     char[] first10 = new char [10];
     char [] buf = new char[10];
@@ -120,8 +122,6 @@ public class TableDataReaderTest {
 
     tableDataReader.read(buf,0,10);
 
-//    System.out.println("first10 : " + Arrays.toString(first10));
-//    System.out.println("buf : " + Arrays.toString(buf));
     Assert.assertArrayEquals(first10,buf);
   }
 }


[2/2] ambari git commit: AMBARI-17421 adding opencsv parser, added support for delimiters and endline characters in values. (Nitiraj Ratjore via pallavkul)

Posted by pa...@apache.org.
AMBARI-17421 adding opencsv parser, added support for delimiters and endline characters in values. (Nitiraj Ratjore via pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/33d90506
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/33d90506
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/33d90506

Branch: refs/heads/trunk
Commit: 33d9050623e38349eb4bbe7f6f20104d6bc96b3f
Parents: 8df1e8f
Author: Pallav Kulshreshtha <pa...@gmail.com>
Authored: Thu Jun 30 17:14:41 2016 +0530
Committer: Pallav Kulshreshtha <pa...@gmail.com>
Committed: Thu Jun 30 17:14:41 2016 +0530

----------------------------------------------------------------------
 contrib/views/hive/pom.xml                      |   6 +-
 .../view/hive/resources/jobs/Aggregator.java    |   2 +-
 .../view/hive/resources/jobs/JobService.java    |   1 -
 .../hive/resources/jobs/viewJobs/JobImpl.java   |  11 +
 .../view/hive/resources/uploads/CSVParams.java  |  74 +++++
 .../hive/resources/uploads/TableDataReader.java |  37 ++-
 .../view/hive/resources/uploads/TableInput.java |  45 +--
 .../resources/uploads/UploadFromHdfsInput.java  |  51 ++-
 .../hive/resources/uploads/UploadService.java   | 190 ++++++-----
 .../resources/uploads/parsers/DataParser.java   |  10 +-
 .../hive/resources/uploads/parsers/IParser.java |  13 -
 .../resources/uploads/parsers/ParseOptions.java |  12 +
 .../hive/resources/uploads/parsers/Parser.java  |  11 +-
 .../resources/uploads/parsers/RowIterator.java  |   6 +-
 .../uploads/parsers/csv/CSVIterator.java        |  57 ----
 .../uploads/parsers/csv/CSVParser.java          |  55 ----
 .../parsers/csv/commonscsv/CSVIterator.java     |  57 ++++
 .../parsers/csv/commonscsv/CSVParser.java       |  88 ++++++
 .../parsers/csv/opencsv/OpenCSVIterator.java    |  56 ++++
 .../parsers/csv/opencsv/OpenCSVParser.java      |  92 ++++++
 .../uploads/query/InsertFromQueryInput.java     |  26 +-
 .../resources/uploads/query/QueryGenerator.java |  84 +++--
 .../hive/resources/uploads/query/RowFormat.java |  57 ++++
 .../hive/resources/uploads/query/TableInfo.java |  33 +-
 .../ui/hive-web/app/controllers/upload-table.js | 303 ++++++++++++++----
 .../ui/hive-web/app/initializers/i18n.js        |  27 +-
 .../resources/ui/hive-web/app/styles/app.scss   |  10 +
 .../ui/hive-web/app/templates/upload-table.hbs  | 193 +++++++++---
 .../hive/resources/upload/CSVParserTest.java    | 179 ++++++++++-
 .../resources/upload/DataParserCSVTest.java     |   9 +-
 .../resources/upload/DataParserJSONTest.java    |   4 +-
 .../resources/upload/DataParserXMLTest.java     |   4 +-
 .../resources/upload/OpenCSVParserTest.java     | 313 +++++++++++++++++++
 .../view/hive/resources/upload/OpenCSVTest.java | 248 +++++++++++++++
 .../resources/upload/QueryGeneratorTest.java    |  40 ++-
 .../resources/upload/TableDataReaderTest.java   |  30 +-
 36 files changed, 1970 insertions(+), 464 deletions(-)
----------------------------------------------------------------------
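
Before the per-file diffs, a minimal sketch of the flow this change enables:
user-chosen delimiter, quote and escape characters travel through ParseOptions
into the opencsv-backed parser. The classes are the ones introduced in the
diffs below; error handling is omitted and close() is assumed to delegate to
the underlying reader:

import org.apache.ambari.view.hive.client.Row;
import org.apache.ambari.view.hive.resources.uploads.parsers.DataParser;
import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;

import java.io.StringReader;
import java.util.Arrays;

public class CustomDelimiterFlow {
  public static void main(String[] args) throws Exception {
    ParseOptions opts = new ParseOptions();
    opts.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
    opts.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.NONE.toString());
    opts.setOption(ParseOptions.OPTIONS_CSV_DELIMITER, '#');

    // '#' is the field separator, so the comma stays inside the second value
    try (DataParser parser = new DataParser(new StringReader("a#b,b#c"), opts)) {
      for (Row row : parser) {
        System.out.println(Arrays.toString(row.getRow())); // [a, b,b, c]
      }
    }
  }
}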


http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hive/pom.xml b/contrib/views/hive/pom.xml
index 444cd98..d2cdbdd 100644
--- a/contrib/views/hive/pom.xml
+++ b/contrib/views/hive/pom.xml
@@ -69,9 +69,9 @@
       <version>1.6</version>
     </dependency>
     <dependency>
-      <groupId>org.apache.commons</groupId>
-      <artifactId>commons-csv</artifactId>
-      <version>1.0</version>
+      <groupId>com.opencsv</groupId>
+      <artifactId>opencsv</artifactId>
+      <version>3.8</version>
     </dependency>
     <dependency>
       <groupId>org.apache.commons</groupId>

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/Aggregator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/Aggregator.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/Aggregator.java
index 5164a4d..2f0138a 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/Aggregator.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/Aggregator.java
@@ -239,7 +239,7 @@ public class Aggregator {
     for (HiveQueryId hqid : queries) {
       operationIdVsHiveId.put(hqid.operationId, hqid.entity);
     }
-    LOG.info("operationIdVsHiveId : {} ", operationIdVsHiveId);
+    LOG.debug("operationIdVsHiveId : {} ", operationIdVsHiveId);
     //cover case when operationId is present, but not exists in ATS
     //e.g. optimized queries without executing jobs, like "SELECT * FROM TABLE"
     List<Job> jobs = viewJobResourceManager.readAll(new OnlyOwnersFilteringStrategy(username));

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java
index a540ca0..36c2633 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java
@@ -435,7 +435,6 @@ public class JobService extends BaseService {
         job.setSessionTag(null);
       }
 
-      LOG.info("allJobs : {}", allJobs);
       return allJobs;
     } catch (WebApplicationException ex) {
       LOG.error("Exception occured while fetching all jobs.", ex);

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobImpl.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobImpl.java
index 2e5f0f7..c099cae 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobImpl.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobImpl.java
@@ -309,4 +309,15 @@ public class JobImpl implements Job {
   public void setGlobalSettings(String globalSettings) {
     this.globalSettings = globalSettings;
   }
+
+  @Override
+  public String toString() {
+    return new StringBuilder("JobImpl{")
+      .append("id='").append(id)
+      .append(", owner='").append(owner)
+      .append(", hiveQueryId='").append(hiveQueryId)
+      .append(", dagId='").append(dagId)
+      .append(", queryId='").append(queryId)
+      .append('}').toString();
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/CSVParams.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/CSVParams.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/CSVParams.java
new file mode 100644
index 0000000..355ed6a
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/CSVParams.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads;
+
+import java.io.Serializable;
+
+public class CSVParams implements Serializable {
+
+  public static final char DEFAULT_DELIMITER_CHAR = ',';
+  public static final char DEFAULT_ESCAPE_CHAR = '\\';
+  public static final char DEFAULT_QUOTE_CHAR = '"';
+
+  private Character csvDelimiter;
+  private Character csvEscape;
+  private Character csvQuote;
+
+  public CSVParams() {
+  }
+
+  public CSVParams(Character csvDelimiter, Character csvQuote, Character csvEscape) {
+    this.csvDelimiter = csvDelimiter;
+    this.csvQuote = csvQuote;
+    this.csvEscape = csvEscape;
+  }
+
+  public Character getCsvDelimiter() {
+    return csvDelimiter;
+  }
+
+  public void setCsvDelimiter(Character csvDelimiter) {
+    this.csvDelimiter = csvDelimiter;
+  }
+
+  public Character getCsvEscape() {
+    return csvEscape;
+  }
+
+  public void setCsvEscape(Character csvEscape) {
+    this.csvEscape = csvEscape;
+  }
+
+  public Character getCsvQuote() {
+    return csvQuote;
+  }
+
+  public void setCsvQuote(Character csvQuote) {
+    this.csvQuote = csvQuote;
+  }
+
+  @Override
+  public String toString() {
+    return "CSVParams{" +
+      "csvDelimiter='" + csvDelimiter + '\'' +
+      ", csvEscape='" + csvEscape + '\'' +
+      ", csvQuote='" + csvQuote + '\'' +
+      '}';
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/TableDataReader.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/TableDataReader.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/TableDataReader.java
index e9bdb92..7725719 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/TableDataReader.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/TableDataReader.java
@@ -18,15 +18,17 @@
 
 package org.apache.ambari.view.hive.resources.uploads;
 
+import com.opencsv.CSVWriter;
+import org.apache.ambari.view.hive.client.ColumnDescription;
 import org.apache.ambari.view.hive.client.Row;
-import org.apache.commons.csv.CSVFormat;
-import org.apache.commons.csv.CSVPrinter;
+import org.apache.commons.codec.binary.Hex;
 
 import java.io.IOException;
 import java.io.Reader;
 import java.io.StringReader;
 import java.io.StringWriter;
 import java.util.Iterator;
+import java.util.List;
 
 /**
  * Takes row iterator as input.
@@ -36,13 +38,17 @@ import java.util.Iterator;
 public class TableDataReader extends Reader {
 
   private static final int CAPACITY = 1024;
+  private final List<ColumnDescriptionImpl> header;
   private StringReader stringReader = new StringReader("");
 
   private Iterator<Row> iterator;
-  private static final CSVFormat CSV_FORMAT = CSVFormat.DEFAULT.withRecordSeparator("\n");
+  private boolean encode = false;
+  public static final char CSV_DELIMITER = '\001';
 
-  public TableDataReader(Iterator<Row> rowIterator) {
+  public TableDataReader(Iterator<Row> rowIterator, List<ColumnDescriptionImpl> header, boolean encode) {
     this.iterator = rowIterator;
+    this.encode = encode;
+    this.header = header;
   }
 
   @Override
@@ -64,9 +70,28 @@ public class TableDataReader extends Reader {
 
       if (iterator.hasNext()) { // keep reading as long as we keep getting rows
         StringWriter stringWriter = new StringWriter(CAPACITY);
-        CSVPrinter csvPrinter = new CSVPrinter(stringWriter, CSV_FORMAT);
+        CSVWriter csvPrinter = new CSVWriter(stringWriter,CSV_DELIMITER);
         Row row = iterator.next();
-        csvPrinter.printRecord(row.getRow());
+        // hex-encode values so that embedded \n and \r cannot break the staged rows
+        Object[] columnValues = row.getRow();
+        String[] columns = new String[columnValues.length];
+
+        for(int i = 0; i < columnValues.length; i++){
+          String type = header.get(i).getType();
+          if(this.encode &&
+              (
+                ColumnDescription.DataTypes.STRING.toString().equals(type)
+                || ColumnDescription.DataTypes.VARCHAR.toString().equals(type)
+                || ColumnDescription.DataTypes.CHAR.toString().equals(type)
+              )
+            ){
+            columns[i] = Hex.encodeHexString(((String)columnValues[i]).getBytes()); //default charset
+          }else {
+            columns[i] = (String) columnValues[i];
+          }
+        }
+
+        csvPrinter.writeNext(columns,false);
         stringReader.close(); // close the old string reader
         stringReader = new StringReader(stringWriter.getBuffer().toString());
         csvPrinter.close();
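
The hex encoding above is what makes embedded delimiters and endline
characters safe: encoded text contains only the characters 0-9 and a-f, and
the matching UNHEX() calls emitted by QueryGenerator (see QueryGeneratorTest
later in this mail) restore the original bytes inside Hive. A quick
round-trip check with the same commons-codec utility:

import org.apache.commons.codec.binary.Hex;

public class HexRoundTrip {
  public static void main(String[] args) throws Exception {
    String value = "line1\nline2,with\u0001delimiter";
    String encoded = Hex.encodeHexString(value.getBytes());            // only 0-9a-f
    String decoded = new String(Hex.decodeHex(encoded.toCharArray())); // inverse, as UNHEX is in Hive
    System.out.println(value.equals(decoded));                         // true
  }
}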

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/TableInput.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/TableInput.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/TableInput.java
index f7fbbba..1d5adf4 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/TableInput.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/TableInput.java
@@ -18,20 +18,13 @@
 
 package org.apache.ambari.view.hive.resources.uploads;
 
-import java.util.List;
+import org.apache.ambari.view.hive.resources.uploads.query.TableInfo;
 
 /**
  * used as input in REST call
  */
-class TableInput {
+class TableInput extends TableInfo {
   public Boolean isFirstRowHeader = Boolean.FALSE;
-  public List<ColumnDescriptionImpl> header;
-  public String tableName;
-  public String databaseName;
-  /**
-   * the format of the file created for the table inside hive : ORC TEXTFILE etc.
-   */
-  public String fileType;
 
   public TableInput() {
   }
@@ -44,40 +37,8 @@ class TableInput {
     this.isFirstRowHeader = isFirstRowHeader;
   }
 
-  public List<ColumnDescriptionImpl> getHeader() {
-    return header;
-  }
-
-  public void setHeader(List<ColumnDescriptionImpl> header) {
-    this.header = header;
-  }
-
-  public String getTableName() {
-    return tableName;
-  }
-
-  public void setTableName(String tableName) {
-    this.tableName = tableName;
-  }
-
-  public String getDatabaseName() {
-    return databaseName;
-  }
-
-  public void setDatabaseName(String databaseName) {
-    this.databaseName = databaseName;
-  }
-
-  public String getFileType() {
-    return fileType;
-  }
-
-  public void setFileType(String fileType) {
-    this.fileType = fileType;
-  }
-
   public void validate(){
-    if( null == this.getFileType()){
+    if( null == this.getHiveFileType()){
       throw new IllegalArgumentException("fileType parameter cannot be null.");
     }
     if( null == this.getTableName()){

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadFromHdfsInput.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadFromHdfsInput.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadFromHdfsInput.java
index 14bd27a..af20aff 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadFromHdfsInput.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadFromHdfsInput.java
@@ -19,6 +19,7 @@
 package org.apache.ambari.view.hive.resources.uploads;
 
 import java.io.Serializable;
+import java.util.List;
 
 public class UploadFromHdfsInput implements Serializable{
   private Boolean isFirstRowHeader = Boolean.FALSE;
@@ -26,16 +27,54 @@ public class UploadFromHdfsInput implements Serializable{
   private String hdfsPath;
   private String tableName;
   private String databaseName;
+  private List<ColumnDescriptionImpl> header;
+  private boolean containsEndlines;
+
+  private String csvDelimiter;
+  private String csvEscape;
+  private String csvQuote;
 
   public UploadFromHdfsInput() {
   }
 
-  public UploadFromHdfsInput(Boolean isFirstRowHeader, String inputFileType, String hdfsPath, String tableName, String databaseName) {
-    this.isFirstRowHeader = isFirstRowHeader;
-    this.inputFileType = inputFileType;
-    this.hdfsPath = hdfsPath;
-    this.tableName = tableName;
-    this.databaseName = databaseName;
+  public String getCsvDelimiter() {
+    return csvDelimiter;
+  }
+
+  public List<ColumnDescriptionImpl> getHeader() {
+    return header;
+  }
+
+  public void setHeader(List<ColumnDescriptionImpl> header) {
+    this.header = header;
+  }
+
+  public boolean isContainsEndlines() {
+    return containsEndlines;
+  }
+
+  public void setContainsEndlines(boolean containsEndlines) {
+    this.containsEndlines = containsEndlines;
+  }
+
+  public void setCsvDelimiter(String csvDelimiter) {
+    this.csvDelimiter = csvDelimiter;
+  }
+
+  public String getCsvEscape() {
+    return csvEscape;
+  }
+
+  public void setCsvEscape(String csvEscape) {
+    this.csvEscape = csvEscape;
+  }
+
+  public String getCsvQuote() {
+    return csvQuote;
+  }
+
+  public void setCsvQuote(String csvQuote) {
+    this.csvQuote = csvQuote;
   }
 
   public Boolean getIsFirstRowHeader() {
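
Since UploadFromHdfsInput is the JSON body of the uploadFromHdfs endpoint,
here is a sample payload and how it binds, using the same codehaus
ObjectMapper that UploadService imports below. The exact property names are
an assumption inferred from the getters and setters above:

import org.apache.ambari.view.hive.resources.uploads.UploadFromHdfsInput;
import org.codehaus.jackson.map.ObjectMapper;

public class PayloadBindingSketch {
  public static void main(String[] args) throws Exception {
    // hypothetical request body; property names assumed from the bean accessors
    String json = "{\"isFirstRowHeader\":true,\"inputFileType\":\"CSV\","
      + "\"hdfsPath\":\"/tmp/data.csv\",\"tableName\":\"t\",\"databaseName\":\"db\","
      + "\"csvDelimiter\":\"#\",\"csvQuote\":\"'\",\"csvEscape\":\"$\","
      + "\"containsEndlines\":true}";
    UploadFromHdfsInput input = new ObjectMapper().readValue(json, UploadFromHdfsInput.class);
    System.out.println(input.getCsvDelimiter()); // #
  }
}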

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadService.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadService.java
index ad10751..a83d17d 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadService.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadService.java
@@ -29,24 +29,38 @@ import org.apache.ambari.view.hive.resources.jobs.viewJobs.JobResourceManager;
 import org.apache.ambari.view.hive.resources.uploads.parsers.DataParser;
 import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;
 import org.apache.ambari.view.hive.resources.uploads.parsers.PreviewData;
-import org.apache.ambari.view.hive.resources.uploads.query.*;
-import org.apache.ambari.view.hive.utils.HiveClientFormattedException;
+import org.apache.ambari.view.hive.resources.uploads.query.DeleteQueryInput;
+import org.apache.ambari.view.hive.resources.uploads.query.InsertFromQueryInput;
+import org.apache.ambari.view.hive.resources.uploads.query.QueryGenerator;
+import org.apache.ambari.view.hive.resources.uploads.query.TableInfo;
 import org.apache.ambari.view.hive.utils.ServiceFormattedException;
 import org.apache.ambari.view.hive.utils.SharedObjectsFactory;
 import org.apache.ambari.view.utils.ambari.AmbariApi;
 import org.apache.commons.io.input.ReaderInputStream;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.type.TypeReference;
 import org.json.simple.JSONObject;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.ws.rs.*;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
-import java.io.*;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.Reader;
 import java.lang.reflect.InvocationTargetException;
-import java.util.*;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
 /**
  * UI-driven end points for creating a new hive table and inserting data into it.
@@ -117,7 +131,8 @@ public class UploadService extends BaseService {
     try {
       uploadedInputStream = getHDFSFileStream(input.getHdfsPath());
       this.validateForPreview(input);
-      PreviewData pd = generatePreview(input.getIsFirstRowHeader(), input.getInputFileType(), uploadedInputStream);
+      CSVParams csvParams = getCsvParams(input.getCsvDelimiter(), input.getCsvQuote(), input.getCsvEscape());
+      PreviewData pd = generatePreview(input.getIsFirstRowHeader(), input.getInputFileType(), csvParams, uploadedInputStream);
       String tableName = getBasenameFromPath(input.getHdfsPath());
       return createPreviewResponse(pd, input.getIsFirstRowHeader(), tableName);
     } catch (WebApplicationException e) {
@@ -144,7 +159,10 @@ public class UploadService extends BaseService {
     @FormDataParam("file") InputStream uploadedInputStream,
     @FormDataParam("file") FormDataContentDisposition fileDetail,
     @FormDataParam("isFirstRowHeader") Boolean isFirstRowHeader,
-    @FormDataParam("inputFileType") String inputFileType
+    @FormDataParam("inputFileType") String inputFileType,
+    @FormDataParam("csvDelimiter") String csvDelimiter,
+    @FormDataParam("csvEscape") String csvEscape,
+    @FormDataParam("csvQuote") String csvQuote
   ) {
     try {
       if( null == inputFileType)
@@ -153,7 +171,9 @@ public class UploadService extends BaseService {
       if( null == isFirstRowHeader )
         isFirstRowHeader = false;
 
-      PreviewData pd = generatePreview(isFirstRowHeader, inputFileType, uploadedInputStream);
+      CSVParams csvParams = getCsvParams(csvDelimiter, csvQuote, csvEscape);
+
+      PreviewData pd = generatePreview(isFirstRowHeader, inputFileType, csvParams, uploadedInputStream);
       return createPreviewResponse(pd, isFirstRowHeader, getBasename(fileDetail.getFileName()));
     } catch (WebApplicationException e) {
       LOG.error(getErrorMessage(e), e);
@@ -164,6 +184,35 @@ public class UploadService extends BaseService {
     }
   }
 
+  private CSVParams getCsvParams(String csvDelimiter, String csvQuote, String csvEscape) {
+    char csvq =  CSVParams.DEFAULT_QUOTE_CHAR;
+    char csvd =  CSVParams.DEFAULT_DELIMITER_CHAR;
+    char csve =  CSVParams.DEFAULT_ESCAPE_CHAR;
+
+    if(null != csvDelimiter){
+      char[] csvdArray = csvDelimiter.toCharArray();
+      if(csvdArray.length > 0 ) {
+        csvd = csvdArray[0];
+      }
+    }
+
+    if(null != csvQuote){
+      char[] csvqArray = csvQuote.toCharArray();
+      if(csvqArray.length > 0 ) {
+        csvq = csvqArray[0];
+      }
+    }
+
+    if(null != csvEscape){
+      char[] csveArray = csvEscape.toCharArray();
+      if(csveArray.length > 0 ) {
+        csve = csveArray[0];
+      }
+    }
+
+    return new CSVParams(csvd, csvq, csve);
+  }
+
 
   @Path("/createTable")
   @POST
@@ -172,15 +221,8 @@ public class UploadService extends BaseService {
   public Job createTable(TableInput tableInput) {
     try {
       tableInput.validate();
-      List<ColumnDescriptionImpl> header = tableInput.getHeader();
       String databaseName = tableInput.getDatabaseName();
-      String tableName = tableInput.getTableName();
-      Boolean isFirstRowHeader = tableInput.getIsFirstRowHeader();
-      String fileTypeStr = tableInput.getFileType();
-      HiveFileType hiveFileType = HiveFileType.valueOf(fileTypeStr);
-
-      TableInfo ti = new TableInfo(databaseName, tableName, header, hiveFileType);
-      String tableCreationQuery = generateCreateQuery(ti);
+      String tableCreationQuery = generateCreateQuery(tableInput);
       LOG.info("tableCreationQuery : {}", tableCreationQuery);
 
       Job job = createJob(tableCreationQuery, databaseName);
@@ -200,48 +242,30 @@ public class UploadService extends BaseService {
   @Consumes(MediaType.APPLICATION_JSON)
   @Produces(MediaType.APPLICATION_JSON)
   public Response uploadFileFromHdfs(UploadFromHdfsInput input) {
-    if (ParseOptions.InputFileType.CSV.toString().equals(input.getInputFileType()) && input.getIsFirstRowHeader().equals(Boolean.FALSE)) {
-      try {
-        // upload using the LOAD query
-        LoadQueryInput loadQueryInput = new LoadQueryInput(input.getHdfsPath(), input.getDatabaseName(), input.getTableName());
-        String loadQuery = new QueryGenerator().generateLoadQuery(loadQueryInput);
-        Job job = createJob(loadQuery, input.getDatabaseName());
-
-        JSONObject jo = new JSONObject();
-        jo.put("jobId", job.getId());
-        return Response.ok(jo).build();
-      } catch (WebApplicationException e) {
-        LOG.error(getErrorMessage(e), e);
-        throw e;
-      } catch (Exception e) {
-        LOG.error(e.getMessage(), e);
-        throw new ServiceFormattedException(e);
-      }
-    } else {
-      // create stream and upload
-      InputStream hdfsStream = null;
-      try {
-        hdfsStream = getHDFSFileStream(input.getHdfsPath());
-        String path = uploadFileFromStream(hdfsStream, input.getIsFirstRowHeader(), input.getInputFileType(), input.getTableName(), input.getDatabaseName());
-
-        JSONObject jo = new JSONObject();
-        jo.put("uploadedPath", path);
-
-        return Response.ok(jo).build();
-      } catch (WebApplicationException e) {
-        LOG.error(getErrorMessage(e), e);
-        throw e;
-      } catch (Exception e) {
-        LOG.error(e.getMessage(), e);
-        throw new ServiceFormattedException(e);
-      } finally {
-        if (null != hdfsStream)
-          try {
-            hdfsStream.close();
-          } catch (IOException e) {
-            LOG.error("Exception occured while closing the HDFS stream for path : " + input.getHdfsPath(), e);
-          }
-      }
+    // create stream and upload
+    InputStream hdfsStream = null;
+    try {
+      hdfsStream = getHDFSFileStream(input.getHdfsPath());
+      CSVParams csvParams = getCsvParams(input.getCsvDelimiter(), input.getCsvQuote(), input.getCsvEscape());
+      String path = uploadFileFromStream(hdfsStream, input.getIsFirstRowHeader(), input.getInputFileType(), input.getTableName(), input.getDatabaseName(), input.getHeader(), input.isContainsEndlines(), csvParams);
+
+      JSONObject jo = new JSONObject();
+      jo.put("uploadedPath", path);
+
+      return Response.ok(jo).build();
+    } catch (WebApplicationException e) {
+      LOG.error(getErrorMessage(e), e);
+      throw e;
+    } catch (Exception e) {
+      LOG.error(e.getMessage(), e);
+      throw new ServiceFormattedException(e);
+    } finally {
+      if (null != hdfsStream)
+        try {
+          hdfsStream.close();
+        } catch (IOException e) {
+          LOG.error("Exception occured while closing the HDFS stream for path : " + input.getHdfsPath(), e);
+        }
     }
   }
 
@@ -255,10 +279,19 @@ public class UploadService extends BaseService {
     @FormDataParam("isFirstRowHeader") Boolean isFirstRowHeader,
     @FormDataParam("inputFileType") String inputFileType,   // the format of the file uploaded. CSV/JSON etc.
     @FormDataParam("tableName") String tableName,
-    @FormDataParam("databaseName") String databaseName
+    @FormDataParam("databaseName") String databaseName,
+    @FormDataParam("header") String header,
+    @FormDataParam("containsEndlines") boolean containsEndlines,
+    @FormDataParam("csvDelimiter") String csvDelimiter,
+    @FormDataParam("csvEscape") String csvEscape,
+    @FormDataParam("csvQuote") String csvQuote
+
   ) {
     try {
-      String path = uploadFileFromStream(uploadedInputStream, isFirstRowHeader, inputFileType, tableName, databaseName);
+      CSVParams csvParams = getCsvParams(csvDelimiter, csvQuote, csvEscape);
+      ObjectMapper mapper = new ObjectMapper();
+      List<ColumnDescriptionImpl> columnList = mapper.readValue(header, new TypeReference<List<ColumnDescriptionImpl>>(){});
+      String path = uploadFileFromStream(uploadedInputStream, isFirstRowHeader, inputFileType, tableName, databaseName, columnList, containsEndlines, csvParams);
 
       JSONObject jo = new JSONObject();
       jo.put("uploadedPath", path);
@@ -370,14 +403,13 @@ public class UploadService extends BaseService {
   }
 
   private Job createJob(String query, String databaseName) throws InvocationTargetException, IllegalAccessException, ItemNotFound {
-    Map jobInfo = new HashMap<String, String>();
-    jobInfo.put("title", "Internal Table Creation");
+    Map jobInfo = new HashMap<>();
+    jobInfo.put("title", "Internal Job");
     jobInfo.put("forcedContent", query);
     jobInfo.put("dataBase", databaseName);
 
-    LOG.info("jobInfo : " + jobInfo);
     Job job = new JobImpl(jobInfo);
-    LOG.info("job : " + job);
+    LOG.info("creating job : {}", job);
     getResourceManager().create(job);
 
     JobController createdJobController = getResourceManager().readController(job.getId());
@@ -414,7 +446,7 @@ public class UploadService extends BaseService {
     else return e.getMessage();
   }
 
-  private PreviewData generatePreview(Boolean isFirstRowHeader, String inputFileType, InputStream uploadedInputStream) throws Exception {
+  private PreviewData generatePreview(Boolean isFirstRowHeader, String inputFileType, CSVParams csvParams, InputStream uploadedInputStream) throws Exception {
     ParseOptions parseOptions = new ParseOptions();
     parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, inputFileType);
     if (inputFileType.equals(ParseOptions.InputFileType.CSV.toString())){
@@ -422,6 +454,10 @@ public class UploadService extends BaseService {
         parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
       else
         parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.NONE.toString());
+
+      parseOptions.setOption(ParseOptions.OPTIONS_CSV_DELIMITER, csvParams.getCsvDelimiter());
+      parseOptions.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR, csvParams.getCsvEscape());
+      parseOptions.setOption(ParseOptions.OPTIONS_CSV_QUOTE, csvParams.getCsvQuote());
     }
     else
       parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString());
@@ -434,7 +470,7 @@ public class UploadService extends BaseService {
   }
 
   private Response createPreviewResponse(PreviewData pd, Boolean isFirstRowHeader, String tableName) {
-    Map<String, Object> retData = new HashMap<String, Object>();
+    Map<String, Object> retData = new HashMap<>();
     retData.put("header", pd.getHeader());
     retData.put("rows", pd.getPreviewRows());
     retData.put("isFirstRowHeader", isFirstRowHeader);
@@ -454,19 +490,29 @@ public class UploadService extends BaseService {
     Boolean isFirstRowHeader,
     String inputFileType,   // the format of the file uploaded. CSV/JSON etc.
     String tableName,
-    String databaseName
-
+    String databaseName,
+    List<ColumnDescriptionImpl> header,
+    boolean containsEndlines,
+    CSVParams csvParams
   ) throws Exception {
     LOG.info(" uploading file into databaseName {}, tableName {}", databaseName, tableName);
     ParseOptions parseOptions = new ParseOptions();
     parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, inputFileType);
+    if(isFirstRowHeader){
+      parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+    }else{
+      parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.NONE.toString());
+    }
 
-    DataParser dataParser = new DataParser(new InputStreamReader(uploadedInputStream), parseOptions);
+    if(null != csvParams){
+      parseOptions.setOption(ParseOptions.OPTIONS_CSV_DELIMITER, csvParams.getCsvDelimiter());
+      parseOptions.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR, csvParams.getCsvEscape());
+      parseOptions.setOption(ParseOptions.OPTIONS_CSV_QUOTE, csvParams.getCsvQuote());
+    }
 
-    if (inputFileType.equals(ParseOptions.InputFileType.CSV.toString()) && isFirstRowHeader)
-      dataParser.extractHeader(); // removes the header line if any from the stream
+    DataParser dataParser = new DataParser(new InputStreamReader(uploadedInputStream), parseOptions);
 
-    Reader csvReader = dataParser.getTableDataReader();
+    Reader csvReader = new TableDataReader(dataParser.iterator(), header, containsEndlines); // encode column values into HEX so that \n etc. don't appear in the hive table data
     String path = uploadIntoTable(csvReader, databaseName, tableName);
     return path;
   }
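
The getCsvParams helper above only ever reads the first character of each
submitted form field and otherwise falls back to the CSVParams defaults. A
compact standalone restatement of that fallback (hypothetical class and
method names; CSVParams comes from the diff earlier in this commit):

import org.apache.ambari.view.hive.resources.uploads.CSVParams;

public class CsvParamsFallback {
  static char firstCharOr(String value, char defaultChar) {
    return (value == null || value.isEmpty()) ? defaultChar : value.charAt(0);
  }

  public static void main(String[] args) {
    CSVParams params = new CSVParams(
      firstCharOr("#", CSVParams.DEFAULT_DELIMITER_CHAR), // '#'
      firstCharOr(null, CSVParams.DEFAULT_QUOTE_CHAR),    // falls back to '"'
      firstCharOr("", CSVParams.DEFAULT_ESCAPE_CHAR));    // falls back to '\\'
    System.out.println(params);
  }
}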

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/DataParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/DataParser.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/DataParser.java
index d03dd7e..fe2c740 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/DataParser.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/DataParser.java
@@ -19,11 +19,10 @@
 package org.apache.ambari.view.hive.resources.uploads.parsers;
 
 import org.apache.ambari.view.hive.client.Row;
-import org.apache.ambari.view.hive.resources.uploads.parsers.csv.CSVParser;
+import org.apache.ambari.view.hive.resources.uploads.parsers.csv.opencsv.OpenCSVParser;
 import org.apache.ambari.view.hive.resources.uploads.parsers.json.JSONParser;
 import org.apache.ambari.view.hive.resources.uploads.parsers.xml.XMLParser;
 
-import java.io.IOException;
 import java.io.Reader;
 import java.util.Iterator;
 
@@ -37,7 +36,7 @@ public class DataParser implements IParser {
 
   public DataParser(Reader reader, ParseOptions parseOptions) throws Exception {
     if (parseOptions.getOption(ParseOptions.OPTIONS_FILE_TYPE).equals(ParseOptions.InputFileType.CSV.toString())) {
-      parser = new CSVParser(reader, parseOptions);
+      parser = new OpenCSVParser(reader, parseOptions);
     } else if (parseOptions.getOption(ParseOptions.OPTIONS_FILE_TYPE).equals(ParseOptions.InputFileType.JSON.toString())) {
       parser = new JSONParser(reader, parseOptions);
     } else if (parseOptions.getOption(ParseOptions.OPTIONS_FILE_TYPE).equals(ParseOptions.InputFileType.XML.toString())) {
@@ -46,11 +45,6 @@ public class DataParser implements IParser {
   }
 
   @Override
-  public Reader getTableDataReader() {
-    return parser.getTableDataReader();
-  }
-
-  @Override
   public PreviewData parsePreview() {
     return parser.parsePreview();
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/IParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/IParser.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/IParser.java
index 8b75c04..4f4dc37 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/IParser.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/IParser.java
@@ -18,27 +18,14 @@
 
 package org.apache.ambari.view.hive.resources.uploads.parsers;
 
-import org.apache.ambari.view.hive.client.ColumnDescription;
 import org.apache.ambari.view.hive.client.Row;
 
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.Reader;
-import java.util.List;
-
 /**
  * Interface defining methods for Parsers that can be used for generating preview
  * and uploading table into hive.
  */
 public interface IParser extends Iterable<Row>, AutoCloseable{
 
-  /**
-   * @return returns the Reader that can be read to get the table data as CSV Text Data that can be uploaded directly
-   * to HDFS
-   */
-  Reader getTableDataReader();
-
   PreviewData parsePreview();
 
   Row extractHeader();

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/ParseOptions.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/ParseOptions.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/ParseOptions.java
index e592b5f..3db4813 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/ParseOptions.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/ParseOptions.java
@@ -21,6 +21,11 @@ package org.apache.ambari.view.hive.resources.uploads.parsers;
 import java.util.HashMap;
 
 public class ParseOptions {
+  public static final String OPTIONS_CSV_DELIMITER = "OPTIONS_CSV_DELIMITER";
+  public static final String OPTIONS_CSV_QUOTE = "OPTIONS_CSV_QUOTE";
+  public static final String OPTIONS_HEADERS = "OPTIONS_HEADERS";
+  public static final String OPTIONS_CSV_ESCAPE_CHAR = "OPTIONS_CSV_ESCAPE_CHAR";
+
   public enum InputFileType {
     CSV,
     JSON,
@@ -46,4 +51,11 @@ public class ParseOptions {
   public Object getOption(String key) {
     return this.options.get(key);
   }
+
+  @Override
+  public String toString() {
+    return new StringBuilder("ParseOptions{")
+      .append("options=").append(options)
+      .append('}').toString();
+  }
 }
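
A short sketch of how the keys added above are meant to be populated. Again this
assumes a setOption(key, value) setter; note that the values are boxed Characters,
matching the (Character) casts in the parser constructors below.

  import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;

  public class CsvOptionsExample {
    static ParseOptions tsvOptions() {
      ParseOptions options = new ParseOptions();
      options.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
      options.setOption(ParseOptions.OPTIONS_CSV_DELIMITER, Character.valueOf('\t')); // tab-separated values
      options.setOption(ParseOptions.OPTIONS_CSV_QUOTE, Character.valueOf('"'));
      options.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR, Character.valueOf('\\'));
      return options;
    }
  }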

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/Parser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/Parser.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/Parser.java
index 49f47c7..782b088 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/Parser.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/Parser.java
@@ -21,7 +21,6 @@ package org.apache.ambari.view.hive.resources.uploads.parsers;
 import org.apache.ambari.view.hive.client.ColumnDescription;
 import org.apache.ambari.view.hive.client.Row;
 import org.apache.ambari.view.hive.resources.uploads.ColumnDescriptionImpl;
-import org.apache.ambari.view.hive.resources.uploads.TableDataReader;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -39,6 +38,7 @@ public abstract class Parser implements IParser {
 
   protected final static Logger LOG =
     LoggerFactory.getLogger(Parser.class);
+  public static final String COLUMN_PREFIX = "column";
 
   protected Reader reader; // same as CSV reader in this case
   protected ParseOptions parseOptions;
@@ -70,11 +70,6 @@ public abstract class Parser implements IParser {
   }
 
   @Override
-  public Reader getTableDataReader() {
-    return new TableDataReader(this.iterator());
-  }
-
-  @Override
   public PreviewData parsePreview() {
     LOG.info("generating preview for : {}", this.parseOptions );
 
@@ -88,7 +83,7 @@ public abstract class Parser implements IParser {
     }
 
     int numberOfRows = numberOfPreviewRows;
-    previewRows = new ArrayList<>(numberOfPreviewRows + 1); // size including the header.
+    previewRows = new ArrayList<>(numberOfPreviewRows);
 
     Row headerRow = null;
     Integer numOfCols = null;
@@ -152,7 +147,7 @@ public abstract class Parser implements IParser {
       ColumnDescription.DataTypes type = getLikelyDataType(previewRows,colNum);
       LOG.info("datatype detected for column {} : {}", colNum, type);
 
-      String colName = "Column" + (colNum + 1);
+      String colName = COLUMN_PREFIX + (colNum + 1);
       if (null != headerRow)
         colName = (String) headerRow.getRow()[colNum];
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/RowIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/RowIterator.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/RowIterator.java
index 69fe864..2dc8c22 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/RowIterator.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/RowIterator.java
@@ -41,8 +41,10 @@ public class RowIterator implements Iterator<Row> {
   public RowIterator(RowMapIterator iterator) {
     this.iterator = iterator;
     LinkedHashMap<String, String> obj = iterator.peek();
-    if (null != obj)
-      headers = new LinkedList<>(obj.keySet());
+    headers = new LinkedList<>();
+    if (null != obj) {
+      headers.addAll(obj.keySet());
+    }
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/CSVIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/CSVIterator.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/CSVIterator.java
deleted file mode 100644
index 3342f49..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/CSVIterator.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.resources.uploads.parsers.csv;
-
-import org.apache.ambari.view.hive.client.Row;
-import org.apache.commons.csv.CSVRecord;
-
-import java.util.Iterator;
-
-/**
- * iterates over the input CSV records and generates Row objects
- */
-class CSVIterator implements Iterator<Row> {
-
-  private Iterator<CSVRecord> iterator;
-
-  public CSVIterator(Iterator<CSVRecord> iterator) {
-    this.iterator = iterator;
-  }
-
-  @Override
-  public boolean hasNext() {
-    return iterator.hasNext();
-  }
-
-  @Override
-  public Row next() {
-    CSVRecord row = iterator.next();
-    Object[] values = new Object[row.size()];
-    for (int i = 0; i < values.length; i++) {
-      values[i] = row.get(i);
-    }
-    Row r = new Row(values);
-    return r;
-  }
-
-  @Override
-  public void remove() {
-    this.iterator.remove();
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/CSVParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/CSVParser.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/CSVParser.java
deleted file mode 100644
index a48041c..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/CSVParser.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.hive.resources.uploads.parsers.csv;
-
-import org.apache.ambari.view.hive.client.Row;
-import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;
-import org.apache.ambari.view.hive.resources.uploads.parsers.Parser;
-import org.apache.commons.csv.CSVFormat;
-
-import java.io.*;
-import java.util.*;
-
-/**
- * Parses the given Reader which contains CSV stream and extracts headers and rows, and detect datatypes of columns
- */
-public class CSVParser extends Parser {
-
-  private CSVIterator iterator;
-  private org.apache.commons.csv.CSVParser parser;
-
-  public CSVParser(Reader reader, ParseOptions parseOptions) throws IOException {
-    super(reader, parseOptions);
-    parser = new org.apache.commons.csv.CSVParser(this.reader, CSVFormat.EXCEL);
-    iterator = new CSVIterator(parser.iterator());
-  }
-
-  @Override
-  public Row extractHeader() {
-    return this.iterator().next();
-  }
-
-  @Override
-  public void close() throws Exception {
-    this.parser.close();
-  }
-
-  public Iterator<Row> iterator() {
-    return iterator; // only one iterator per parser.
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/commonscsv/CSVIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/commonscsv/CSVIterator.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/commonscsv/CSVIterator.java
new file mode 100644
index 0000000..e50a87c
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/commonscsv/CSVIterator.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.parsers.csv.commonscsv;
+
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.commons.csv.CSVRecord;
+
+import java.util.Iterator;
+
+/**
+ * Iterates over the input CSV records and generates Row objects
+ */
+class CSVIterator implements Iterator<Row> {
+
+  private Iterator<CSVRecord> iterator;
+
+  public CSVIterator(Iterator<CSVRecord> iterator) {
+    this.iterator = iterator;
+  }
+
+  @Override
+  public boolean hasNext() {
+    return iterator.hasNext();
+  }
+
+  @Override
+  public Row next() {
+    CSVRecord row = iterator.next();
+    Object[] values = new Object[row.size()];
+    for (int i = 0; i < values.length; i++) {
+      values[i] = row.get(i);
+    }
+    Row r = new Row(values);
+    return r;
+  }
+
+  @Override
+  public void remove() {
+    this.iterator.remove();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/commonscsv/CSVParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/commonscsv/CSVParser.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/commonscsv/CSVParser.java
new file mode 100644
index 0000000..ea9c9fb
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/commonscsv/CSVParser.java
@@ -0,0 +1,88 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.hive.resources.uploads.parsers.csv.commonscsv;
+
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive.resources.uploads.parsers.Parser;
+import org.apache.commons.csv.CSVFormat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.Reader;
+import java.util.Iterator;
+
+/**
+ * Parses the given Reader, which contains a CSV stream, and extracts headers and rows, detecting the data types of columns
+ */
+public class CSVParser extends Parser {
+  private CSVIterator iterator;
+  private org.apache.commons.csv.CSVParser parser;
+  private final static Logger LOG =
+    LoggerFactory.getLogger(CSVParser.class);
+
+  public CSVParser(Reader reader, ParseOptions parseOptions) throws IOException {
+    super(reader, parseOptions);
+    CSVFormat format = CSVFormat.DEFAULT;
+    String optHeader = (String) parseOptions.getOption(ParseOptions.OPTIONS_HEADER);
+    if (optHeader != null) {
+      if (optHeader.equals(ParseOptions.HEADER.FIRST_RECORD.toString())) {
+        format = format.withHeader();
+      } else if (optHeader.equals(ParseOptions.HEADER.PROVIDED_BY_USER.toString())) {
+        String[] headers = (String[]) parseOptions.getOption(ParseOptions.OPTIONS_HEADERS);
+        format = format.withHeader(headers);
+      }
+    }
+
+    Character delimiter = (Character) parseOptions.getOption(ParseOptions.OPTIONS_CSV_DELIMITER);
+    if (delimiter != null) {
+      LOG.info("setting delimiter as {}", delimiter);
+      format = format.withDelimiter(delimiter);
+    }
+
+    Character quote = (Character) parseOptions.getOption(ParseOptions.OPTIONS_CSV_QUOTE);
+    if (null != quote) {
+      LOG.info("setting Quote char : {}", quote);
+      format = format.withQuote(quote);
+    }
+
+    Character escape = (Character) parseOptions.getOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR);
+    if (escape != null) {
+      LOG.info("setting escape as {}", escape);
+      format = format.withEscape(escape);
+    }
+
+    parser = new org.apache.commons.csv.CSVParser(this.reader, format);
+    iterator = new CSVIterator(parser.iterator());
+  }
+
+  @Override
+  public Row extractHeader() {
+    return new Row(parser.getHeaderMap().keySet().toArray());
+  }
+
+  @Override
+  public void close() throws Exception {
+    this.parser.close();
+  }
+
+  public Iterator<Row> iterator() {
+    return iterator; // only one iterator per parser.
+  }
+}
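
The reassignment pattern in the constructor above follows from commons-csv
semantics: CSVFormat is immutable, and every with*() call returns a new format.
A self-contained illustration against the library itself (the sample data is
made up):

  import java.io.StringReader;

  import org.apache.commons.csv.CSVFormat;
  import org.apache.commons.csv.CSVParser;
  import org.apache.commons.csv.CSVRecord;

  public class CommonsCsvFormatExample {
    public static void main(String[] args) throws Exception {
      CSVFormat format = CSVFormat.DEFAULT
          .withDelimiter(';')   // each call returns a fresh, tweaked copy
          .withQuote('\'')
          .withEscape('\\');
      try (CSVParser parser = new CSVParser(new StringReader("1;'a;b';c\n2;x;y"), format)) {
        for (CSVRecord record : parser) {
          System.out.println(record.get(1)); // prints "a;b", then "x"
        }
      }
    }
  }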

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/opencsv/OpenCSVIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/opencsv/OpenCSVIterator.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/opencsv/OpenCSVIterator.java
new file mode 100644
index 0000000..3f605cb
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/opencsv/OpenCSVIterator.java
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.parsers.csv.opencsv;
+
+import org.apache.ambari.view.hive.client.Row;
+
+import java.util.Iterator;
+
+/**
+ * Iterates over the input CSV records and generates Row objects
+ */
+class OpenCSVIterator implements Iterator<Row> {
+
+  private Iterator<String[]> iterator;
+
+  public OpenCSVIterator(Iterator<String[]> iterator) {
+    this.iterator = iterator;
+  }
+
+  @Override
+  public boolean hasNext() {
+    return iterator.hasNext();
+  }
+
+  @Override
+  public Row next() {
+    String[] row = iterator.next();
+    Object[] values = new Object[row.length];
+    for (int i = 0; i < values.length; i++) {
+      values[i] = row[i];
+    }
+    Row r = new Row(values);
+    return r;
+  }
+
+  @Override
+  public void remove() {
+    this.iterator.remove();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/opencsv/OpenCSVParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/opencsv/OpenCSVParser.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/opencsv/OpenCSVParser.java
new file mode 100644
index 0000000..0109e91
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/parsers/csv/opencsv/OpenCSVParser.java
@@ -0,0 +1,92 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.hive.resources.uploads.parsers.csv.opencsv;
+
+import com.opencsv.CSVParserBuilder;
+import com.opencsv.CSVReader;
+import com.opencsv.CSVReaderBuilder;
+import org.apache.ambari.view.hive.client.Row;
+import org.apache.ambari.view.hive.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive.resources.uploads.parsers.Parser;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.Reader;
+import java.util.Iterator;
+
+/**
+ * Parses the given Reader, which contains a CSV stream, and extracts headers and rows
+ */
+public class OpenCSVParser extends Parser {
+  private Row headerRow;
+  private OpenCSVIterator iterator;
+  private CSVReader csvReader = null;
+  private final static Logger LOG =
+    LoggerFactory.getLogger(OpenCSVParser.class);
+
+  public OpenCSVParser(Reader reader, ParseOptions parseOptions) throws IOException {
+    super(reader, parseOptions);
+    CSVParserBuilder csvParserBuilder = new CSVParserBuilder();
+    CSVReaderBuilder builder =  new CSVReaderBuilder(reader);
+
+    Character delimiter = (Character) parseOptions.getOption(ParseOptions.OPTIONS_CSV_DELIMITER);
+    if (delimiter != null) {
+      LOG.info("setting delimiter as {}", delimiter);
+      csvParserBuilder = csvParserBuilder.withSeparator(delimiter);
+    }
+
+    Character quote = (Character) parseOptions.getOption(ParseOptions.OPTIONS_CSV_QUOTE);
+    if (null != quote) {
+      LOG.info("setting Quote char : {}", quote);
+      csvParserBuilder = csvParserBuilder.withQuoteChar(quote);
+    }
+
+    Character escapeChar = (Character) parseOptions.getOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR);
+    if (null != escapeChar) {
+      LOG.info("setting escapeChar : {}", escapeChar);
+      csvParserBuilder = csvParserBuilder.withEscapeChar(escapeChar);
+    }
+
+    builder.withCSVParser(csvParserBuilder.build());
+    this.csvReader = builder.build();
+    iterator = new OpenCSVIterator(this.csvReader.iterator());
+
+    String optHeader = (String) parseOptions.getOption(ParseOptions.OPTIONS_HEADER);
+    if (optHeader != null) {
+      if (optHeader.equals(ParseOptions.HEADER.FIRST_RECORD.toString())) {
+        this.headerRow = iterator.hasNext() ? iterator.next() : new Row(new Object[]{});
+      }
+    }
+
+  }
+
+  @Override
+  public Row extractHeader() {
+    return headerRow;
+  }
+
+  @Override
+  public void close() throws Exception {
+    this.csvReader.close();
+  }
+
+  public Iterator<Row> iterator() {
+    return iterator; // only one iterator per parser.
+  }
+}
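
A matching standalone illustration of the opencsv builder API used above: the
parser builder owns the separator, quote, and escape characters, while the
reader builder wires that parser to the input. The sample data is made up.

  import java.io.StringReader;

  import com.opencsv.CSVParserBuilder;
  import com.opencsv.CSVReader;
  import com.opencsv.CSVReaderBuilder;

  public class OpenCsvBuilderExample {
    public static void main(String[] args) throws Exception {
      com.opencsv.CSVParser rowParser = new CSVParserBuilder()
          .withSeparator(';')
          .withQuoteChar('"')
          .withEscapeChar('\\')
          .build();
      try (CSVReader reader = new CSVReaderBuilder(new StringReader("1;\"a;b\";c"))
          .withCSVParser(rowParser)
          .build()) {
        for (String[] row : reader) { // CSVReader is Iterable<String[]>
          System.out.println(row[1]); // prints "a;b"
        }
      }
    }
  }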

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/InsertFromQueryInput.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/InsertFromQueryInput.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/InsertFromQueryInput.java
index 5befc51..c568e0b 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/InsertFromQueryInput.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/InsertFromQueryInput.java
@@ -18,20 +18,44 @@
 
 package org.apache.ambari.view.hive.resources.uploads.query;
 
+import org.apache.ambari.view.hive.resources.uploads.ColumnDescriptionImpl;
+
+import java.util.List;
+
 public class InsertFromQueryInput {
   private String fromDatabase;
   private String fromTable;
   private String toDatabase;
   private String toTable;
+  private List<ColumnDescriptionImpl> header;
+  private Boolean unhexInsert = Boolean.FALSE;
 
   public InsertFromQueryInput() {
   }
 
-  public InsertFromQueryInput(String fromDatabase, String fromTable, String toDatabase, String toTable) {
+  public InsertFromQueryInput(String fromDatabase, String fromTable, String toDatabase, String toTable, List<ColumnDescriptionImpl> header, Boolean unhexInsert) {
     this.fromDatabase = fromDatabase;
     this.fromTable = fromTable;
     this.toDatabase = toDatabase;
     this.toTable = toTable;
+    this.header = header;
+    this.unhexInsert = unhexInsert;
+  }
+
+  public List<ColumnDescriptionImpl> getHeader() {
+    return header;
+  }
+
+  public void setHeader(List<ColumnDescriptionImpl> header) {
+    this.header = header;
+  }
+
+  public Boolean getUnhexInsert() {
+    return unhexInsert;
+  }
+
+  public void setUnhexInsert(Boolean unhexInsert) {
+    this.unhexInsert = unhexInsert;
   }
 
   public String getFromDatabase() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/QueryGenerator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/QueryGenerator.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/QueryGenerator.java
index 6bab229..6db89e0 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/QueryGenerator.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/QueryGenerator.java
@@ -19,7 +19,8 @@
 package org.apache.ambari.view.hive.resources.uploads.query;
 
 import org.apache.ambari.view.hive.client.ColumnDescription;
-import org.apache.ambari.view.hive.resources.uploads.*;
+import org.apache.ambari.view.hive.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive.resources.uploads.HiveFileType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -36,10 +37,10 @@ public class QueryGenerator {
 
   public String generateCreateQuery(TableInfo tableInfo) {
     String tableName = tableInfo.getTableName();
-    List<ColumnDescriptionImpl> cdList = tableInfo.getColumns();
+    List<ColumnDescriptionImpl> cdList = tableInfo.getHeader();
 
     StringBuilder query = new StringBuilder();
-    query.append("create table " + tableName + " (");
+    query.append("CREATE TABLE ").append(tableName).append(" (");
     Collections.sort(cdList, new Comparator<ColumnDescription>() {
       @Override
       public int compare(ColumnDescription o1, ColumnDescription o2) {
@@ -55,7 +56,7 @@ public class QueryGenerator {
         query.append(", ");
       }
 
-      query.append(cd.getName() + " " + cd.getType());
+      query.append(cd.getName()).append(" ").append(cd.getType());
       if (cd.getPrecision() != null) {
         query.append("(").append(cd.getPrecision());
         if (cd.getScale() != null) {
@@ -68,31 +69,74 @@ public class QueryGenerator {
 
     query.append(")");
 
-    if (tableInfo.getHiveFileType() == HiveFileType.TEXTFILE)
-      query.append(" ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE;");
-    else
-      query.append(" STORED AS " + tableInfo.getHiveFileType() + ";");
-
-    String queryString = query.toString();
+    if (tableInfo.getHiveFileType().equals(HiveFileType.TEXTFILE)) {
+      query.append(getRowFormatQuery(tableInfo.getRowFormat()));
+    }
+    query.append(" STORED AS ").append(tableInfo.getHiveFileType().toString());
+    String queryString = query.append(";").toString();
     LOG.info("Query : {}", queryString);
     return queryString;
   }
 
+  private String getRowFormatQuery(RowFormat rowFormat) {
+    StringBuilder sb = new StringBuilder();
+    if (rowFormat != null) {
+      sb.append(" ROW FORMAT DELIMITED");
+      if (rowFormat.getFieldsTerminatedBy() != null) {
+        sb.append(" FIELDS TERMINATED BY '").append(rowFormat.getFieldsTerminatedBy()).append('\'');
+      }
+      if (rowFormat.getEscapedBy() != null) {
+        String escape = String.valueOf(rowFormat.getEscapedBy());
+        if (rowFormat.getEscapedBy() == '\\') {
+          escape = escape + '\\'; // backslash must itself be escaped, since it is also the escape character for string literals in Hive.
+        }
+        sb.append(" ESCAPED BY '").append(escape).append('\'');
+      }
+    }
+
+    return sb.toString();
+  }
+
   public String generateInsertFromQuery(InsertFromQueryInput ifqi) {
-    String insertQuery = "insert into table " + ifqi.getToDatabase() + "." + ifqi.getToTable() + " select * from " + ifqi.getFromDatabase() + "." + ifqi.getFromTable();
-    LOG.info("Insert Query : {}", insertQuery);
-    return insertQuery;
+    StringBuilder insertQuery = new StringBuilder("INSERT INTO TABLE ").append(ifqi.getToDatabase()).append(".")
+                                .append(ifqi.getToTable()).append(" SELECT ");
+
+    boolean first = true;
+    for (ColumnDescriptionImpl column : ifqi.getHeader()) {
+      String type = column.getType();
+      boolean unhex = ifqi.getUnhexInsert() && (
+        ColumnDescription.DataTypes.STRING.toString().equals(type)
+          || ColumnDescription.DataTypes.VARCHAR.toString().equals(type)
+          || ColumnDescription.DataTypes.CHAR.toString().equals(type)
+      );
+
+      if (!first) {
+        insertQuery.append(", ");
+      }
+
+      if (unhex) {
+        insertQuery.append("UNHEX(");
+      }
+
+      insertQuery.append(column.getName());
+
+      if (unhex) {
+        insertQuery.append(")");
+      }
+
+      first = false;
+    }
+
+    insertQuery.append(" FROM ").append(ifqi.getFromDatabase()).append(".").append(ifqi.getFromTable()).append(";");
+    String query = insertQuery.toString();
+    LOG.info("Insert Query : {}", query);
+    return query;
   }
 
   public String generateDropTableQuery(DeleteQueryInput deleteQueryInput) {
-    String dropQuery = "drop table " + deleteQueryInput.getDatabase() + "." + deleteQueryInput.getTable();
+    String dropQuery = new StringBuilder("DROP TABLE ").append(deleteQueryInput.getDatabase())
+                      .append(".").append(deleteQueryInput.getTable()).append(";").toString();
     LOG.info("Drop Query : {}", dropQuery);
     return dropQuery;
   }
-
-  public String generateLoadQuery(LoadQueryInput loadQueryInput) {
-    String loadFromQuery = "LOAD DATA INPATH '"  + loadQueryInput.getHdfsFilePath() + "' INTO TABLE " + loadQueryInput.getDatabaseName() + "." + loadQueryInput.getTableName() + ";" ;
-    LOG.info("Load From Query : {}", loadFromQuery);
-    return loadFromQuery;
-  }
 }
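
To make the generated query shapes concrete: for a hypothetical TEXTFILE table
with columns (id INT, name STRING), a RowFormat with fieldsTerminatedBy=','
and escapedBy='\', and unhexInsert set to true, the generators above would
produce roughly:

  CREATE TABLE mytable (id INT, name STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' ESCAPED BY '\\' STORED AS TEXTFILE;
  INSERT INTO TABLE mydb.mytable SELECT id, UNHEX(name) FROM mydb.mytable_staging;
  DROP TABLE mydb.mytable_staging;

All table and column names here are illustrative. Note the doubled backslash in
ESCAPED BY: getRowFormatQuery doubles it because backslash is also the escape
character for string literals in Hive.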

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/RowFormat.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/RowFormat.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/RowFormat.java
new file mode 100644
index 0000000..4c1cb2b
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/RowFormat.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.uploads.query;
+
+public class RowFormat {
+  private Character fieldsTerminatedBy;
+  private Character escapedBy;
+
+  private RowFormat() {
+  }
+
+  public RowFormat(Character fieldsTerminatedBy, Character escapedBy) {
+    this.fieldsTerminatedBy = fieldsTerminatedBy;
+    this.escapedBy = escapedBy;
+  }
+
+  public Character getFieldsTerminatedBy() {
+    return fieldsTerminatedBy;
+  }
+
+  public void setFieldsTerminatedBy(Character fieldsTerminatedBy) {
+    this.fieldsTerminatedBy = fieldsTerminatedBy;
+  }
+
+  public Character getEscapedBy() {
+    return escapedBy;
+  }
+
+  public void setEscapedBy(Character escapedBy) {
+    this.escapedBy = escapedBy;
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("RowFormat{ fieldsTerminatedBy='");
+    sb.append(fieldsTerminatedBy).append( '\'').append(", escapedBy='")
+      .append(escapedBy).append("\'}");
+
+    return sb.toString();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d90506/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/TableInfo.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/TableInfo.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/TableInfo.java
index 903e5b0..76f448c 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/TableInfo.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/query/TableInfo.java
@@ -21,17 +21,20 @@ package org.apache.ambari.view.hive.resources.uploads.query;
 import org.apache.ambari.view.hive.resources.uploads.ColumnDescriptionImpl;
 import org.apache.ambari.view.hive.resources.uploads.HiveFileType;
 
+import java.io.Serializable;
 import java.util.List;
 
 /**
  * used as input in Query generation
  */
-public class TableInfo {
+public class TableInfo implements Serializable {
   private String tableName;
   private String databaseName;
-  private List<ColumnDescriptionImpl> columns;
+  private List<ColumnDescriptionImpl> header;
   private HiveFileType hiveFileType;
 
+  private RowFormat rowFormat;
+
   public String getTableName() {
     return tableName;
   }
@@ -48,12 +51,12 @@ public class TableInfo {
     this.databaseName = databaseName;
   }
 
-  public List<ColumnDescriptionImpl> getColumns() {
-    return columns;
+  public List<ColumnDescriptionImpl> getHeader() {
+    return header;
   }
 
-  public void setColumns(List<ColumnDescriptionImpl> columns) {
-    this.columns = columns;
+  public void setHeader(List<ColumnDescriptionImpl> header) {
+    this.header = header;
   }
 
   public HiveFileType getHiveFileType() {
@@ -64,18 +67,28 @@ public class TableInfo {
     this.hiveFileType = hiveFileType;
   }
 
-  public TableInfo(String databaseName, String tableName, List<ColumnDescriptionImpl> columns, HiveFileType hiveFileType) {
-    this.tableName = tableName;
+  public RowFormat getRowFormat() {
+    return rowFormat;
+  }
+
+  public void setRowFormat(RowFormat rowFormat) {
+    this.rowFormat = rowFormat;
+  }
+
+  public TableInfo(String databaseName, String tableName, List<ColumnDescriptionImpl> header, HiveFileType hiveFileType, RowFormat rowFormat) {
     this.databaseName = databaseName;
-    this.columns = columns;
+    this.tableName = tableName;
+    this.header = header;
     this.hiveFileType = hiveFileType;
+    this.rowFormat = rowFormat;
   }
 
   public TableInfo(TableInfo tableInfo) {
     this.tableName = tableInfo.tableName;
     this.databaseName = tableInfo.databaseName;
-    this.columns = tableInfo.columns;
+    this.header = tableInfo.header;
     this.hiveFileType = tableInfo.hiveFileType;
+    this.rowFormat = tableInfo.rowFormat;
   }
 
   public TableInfo() {