You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by rz...@apache.org on 2017/02/17 22:57:02 UTC
[24/51] [abbrv] ambari git commit: AMBARI-19872 : HiveView2.0 : added
Upload CSV, JSON, XML to create table feature in the new view (nitirajrathore)
http://git-wip-us.apache.org/repos/asf/ambari/blob/53e6c8d4/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/upload-table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/upload-table.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/upload-table.js
new file mode 100644
index 0000000..ba3260c
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/upload-table.js
@@ -0,0 +1,925 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import NewTable from './new';
+import constants from '../../../../utils/constants';
+import Column from '../../../../models/column';
+import datatypes from '../../../../configs/datatypes';
+
+export default NewTable.extend({
+ COLUMN_NAME_REGEX: "^[a-zA-Z]{1}[a-zA-Z0-9_]*$",
+ TABLE_NAME_REGEX: "^[a-zA-Z]{1}[a-zA-Z0-9_]*$",
+ HDFS_PATH_REGEX: "^[/]{1}.+", // unix path allows everything but here we have to mention full path so starts with /
+ init: function () {
+ this._super();
+ },
+
+ COLUMN_NAME_PREFIX : "column",
+ i18n : Ember.inject.service('i18n'),
+ jobService: Ember.inject.service(constants.services.jobs),
+ notifyService: Ember.inject.service(constants.services.alertMessages),
+ showErrors: false,
+ baseUrl: "/resources/upload",
+ header: null, // header received from server
+ files: null, // files that need to be uploaded only file[0] is relevant
+ firstRow: [], // the actual first row of the table.
+ rows: null, // preview rows received from server
+ databaseName: null,
+ selectedDatabase: null,
+ filePath: null,
+ tableName: null,
+ uploadProgressInfos : [],
+ DEFAULT_DB_NAME : 'default',
+ showPreview : false,
+ containsEndlines: false,
+ storedAsTextFile : Ember.computed.equal("selectedFileType","TEXTFILE"),
+ storedAsNotTextFile : Ember.computed.not("storedAsTextFile"),
+ setupController(controller, model) {
+ this._super(controller, model);
+ this.controller.set("showUploadTableModal", false);
+ },
+ onChangeSelectedFileType: function(){
+ if(this.get('selectedFileType') === this.get('fileTypes')[1] && this.get('containsEndlines') === true){
+ this.set('containsEndlines', false);
+ }
+ }.observes("selectedFileType", "containsEndlines"),
+ getUploader(){
+ return this.get('store').adapterFor('upload-table');
+ },
+ onChangeUploadSource : function(){
+ this.clearFields();
+ }.observes("uploadSource"),
+ showCSVFormatInput: false,
+ uploadProgressInfo : Ember.computed("uploadProgressInfos.[]",function(){
+ var info = "";
+ for( var i = 0 ; i < this.get('uploadProgressInfos').length ; i++)
+ info += this.get('uploadProgressInfos').objectAt(i);
+
+ return new Ember.Handlebars.SafeString(info);
+ }),
+ _setHeaderElements : function(header,valueArray){
+ header.forEach(function (item, index) {
+ Ember.set(item, 'name', valueArray[index]);
+ }, this);
+ },
+ // Observer: re-shapes the preview table when the user toggles the
+ // "first row is header" checkbox.
+ isFirstRowHeaderDidChange: function () {
+ if (this.get('isFirstRowHeader') != null && typeof this.get('isFirstRowHeader') !== 'undefined') {
+ if (this.get('isFirstRowHeader') == false) {
+ if (this.get('rows')) {
+ // unchecked: push the remembered first row back on top of the preview
+ // rows and fall back to generated names (column1, column2, ...)
+ this.get('rows').unshiftObject({row: this.get('firstRow')});
+ this._setHeaderElements(this.get('header'),this.get('defaultColumnNames'));
+ }
+ } else if (this.get('header')) { // headers are available
+ // checked: use the remembered first row as header values and drop it
+ // from the visible preview rows
+ this._setHeaderElements(this.get('header'),this.get('firstRow'));
+ this.get('rows').removeAt(0);
+ }
+
+ this.printValues();
+ }
+ }.observes('isFirstRowHeader'),
+
+ // Progress-message helpers. The original push/pop message-stack behaviour
+ // is disabled (see commented code); currently only the latest message is
+ // shown in the upload modal.
+ popUploadProgressInfos: function () {
+ // Intentionally a no-op while the stack behaviour is disabled.
+ // var msg = this.get('uploadProgressInfos').popObject();
+ },
+
+ pushUploadProgressInfos : function(info){
+ // Show the latest progress message and make sure the modal is visible.
+ this.controller.set("uploadTableMessage", info);
+ this.showUploadModal();
+ // this.get('uploadProgressInfos').pushObject(info);
+ },
+ clearUploadProgressModal : function(){
+ // Pops one entry per queued message; effectively a no-op while
+ // popUploadProgressInfos is disabled.
+ var len = this.get('uploadProgressInfos').length;
+ for( var i = 0 ; i < len ; i++){
+ this.popUploadProgressInfos();
+ }
+ },
+
+ hideUploadModal : function(){
+ this.controller.set("showUploadTableModal", false);
+ this.clearUploadProgressModal();
+ },
+
+ showUploadModal : function(){
+ this.controller.set("showUploadTableModal", true);
+ },
+
+ clearFields: function () {
+ this.set("showPreview",false);
+ this.set("hdfsPath");
+ this.set("header");
+ this.set("rows");
+ this.set("escapedBy");
+ this.set("fieldsTerminatedBy");
+ this.set("error");
+ this.set('files');
+ this.set("firstRow");
+ this.set("selectedDatabase",null);
+ this.set("databaseName");
+ this.set("filePath");
+ this.set('tableName');
+ this.clearUploadProgressModal();
+ this.printValues();
+ },
+
+ printValues: function () {
+ console.log("header : ", this.get('header'),
+ ". rows : ",this.get('rows'),". error : ", this.get('error'),
+ " isFirstRowHeader : ", this.get('isFirstRowHeader'),
+ "firstRow : ", this.get('firstRow'));
+ },
+
+ generateTempTableName: function () {
+ var text = "";
+ var possible = "abcdefghijklmnopqrstuvwxyz";
+
+ for (var i = 0; i < 30; i++)
+ text += possible.charAt(Math.floor(Math.random() * possible.length));
+
+ return text;
+ },
+
+ /**
+  * Polls the job service every 2 seconds until the job reaches a terminal
+  * state, then settles the supplied callbacks: resolve(job) on success,
+  * reject(Error) on cancel / close / error.
+  */
+ waitForJobStatus: function (jobId, resolve, reject) {
+ console.log("finding status of job: ", jobId);
+ var self = this;
+ var fetchJobPromise = this.get('jobService').getJob(jobId);
+ fetchJobPromise.then(function (data) {
+ console.log("waitForJobStatus : data : ", data);
+ // strip the Ember record down to a plain serializable object
+ var job = JSON.parse(JSON.stringify(data));
+ var status = job.status;
+ if (status == constants.statuses.succeeded ) {
+ console.log("resolving waitForJobStatus with : " , status);
+ resolve(job);
+ } else if (status == constants.statuses.canceled || status == constants.statuses.closed || status == constants.statuses.error) {
+ console.log("rejecting waitForJobStatus with : " + status);
+ reject(new Error(job.statusMessage));
+ } else {
+ // still running -- schedule another poll in 2 seconds
+ Ember.run.later(function(){
+ console.log("retrying waitForJobStatus : ", jobId);
+ self.waitForJobStatus(jobId, resolve, reject);
+ }, 2000);
+ }
+ }, function (error) {
+ // fetching the job itself failed -- give up immediately
+ console.log("rejecting waitForJobStatus with : " + error);
+ reject(error);
+ })
+ },
+
+ uploadForPreview: function (sourceObject) {
+ console.log("uploaderForPreview called.");
+ let files = sourceObject.get("fileInfo.files");
+ let csvParams = sourceObject.get("fileFormatInfo.csvParams");
+
+ return this.getUploader().uploadFiles('preview', files, {
+ "isFirstRowHeader": sourceObject.get("isFirstRowHeader"),
+ "inputFileType": sourceObject.get("fileFormatInfo.inputFileType").id,
+ "csvDelimiter": csvParams.get("csvDelimiter").name,
+ "csvEscape": csvParams.get("csvEscape").name,
+ "csvQuote": csvParams.get("csvQuote").name
+ });
+ },
+
+ getAsciiChar : function(key){
+ if(!key){
+ return null;
+ }
+
+ var value = this.get(key);
+ if(value && value.id != -1) {
+ return String.fromCharCode(value.id);
+ }else{
+ return null;
+ }
+ },
+ /**
+  * Collects the CSV delimiter/quote/escape characters chosen by the user,
+  * falling back to the DEFAULT_CSV_* properties when a value is unset.
+  * NOTE(review): the "&& csvX != 0" guards only matter if an ASCII NUL
+  * character can be selected -- confirm whether that case is reachable.
+  */
+ getCSVParams : function(){
+ var csvd = this.getAsciiChar('csvDelimiter');
+ if(!csvd && csvd != 0) csvd = this.get('DEFAULT_CSV_DELIMITER');
+
+ var csvq = this.getAsciiChar('csvQuote');
+ if(!csvq && csvq != 0) csvq = this.get('DEFAULT_CSV_QUOTE');
+
+ var csve = this.getAsciiChar('csvEscape');
+ if(!csve && csve != 0) csve = this.get('DEFAULT_CSV_ESCAPE');
+
+ return {"csvDelimiter": csvd, "csvQuote" : csvq, "csvEscape": csve};
+ },
+
+ uploadForPreviewFromHDFS: function (sourceObject) {
+ console.log("uploadForPreviewFromHDFS called.");
+ // this.validateHDFSPath(hdfsPath);
+ var self = sourceObject;
+ var hdfsPath = sourceObject.get("fileInfo.hdfsPath");
+ var csvParams = sourceObject.get("fileFormatInfo.csvParams");
+
+ return this.getUploader().previewFromHDFS({
+ "isFirstRowHeader": sourceObject.get("fileFormatInfo.isFirstRowHeader"),
+ "inputFileType": sourceObject.get("fileFormatInfo.inputFileType").id,
+ "hdfsPath": hdfsPath,
+ "csvDelimiter": csvParams.get("csvDelimiter").name,
+ "csvEscape": csvParams.get("csvEscape").name,
+ "csvQuote": csvParams.get("csvQuote").name
+ });
+ },
+
+ /**
+  * Generates a data preview for the chosen source: uploads the local file
+  * or asks the server to read the HDFS path, then renders the result.
+  * NOTE(review): the synchronous "finally" below runs as soon as the
+  * request is kicked off, hiding the progress modal while the preview is
+  * still being generated (the promise's own .finally hides it again later)
+  * -- confirm whether the early hide is intended.
+  */
+ generatePreview: function (sourceObject) {
+ var self = this;
+ var promise = null;
+ try {
+ this.waitForGeneratingPreview();
+ if (sourceObject.get('fileInfo.uploadSource') === "local" ) {
+ promise = this.uploadForPreview(sourceObject);
+ } else {
+ promise = this.uploadForPreviewFromHDFS(sourceObject);
+ }
+
+ return promise.then(function (data) {
+ self.onGeneratePreviewSuccess(data);
+ }, function (error) {
+ self.onGeneratePreviewFailure(error);
+ }).catch(function (error) {
+ console.log("inside catch : ", error);
+ }).finally(function () {
+ console.log("finally hide the modal always after preview.");
+ self.hideUploadModal();
+ });
+ }catch(e){
+ // exception before promise will be caught here.
+ console.log("exception before promise : ", e);
+ self.setError(e);
+ }finally{
+ console.log("finally hide the modal always after preview.");
+ self.hideUploadModal();
+ }
+ },
+
+ waitForGeneratingPreview: function () {
+ console.log("waitForGeneratingPreview");
+ this.showUploadModal();
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.generatingPreview'))
+ },
+
+ /**
+  * Renders the preview payload returned by the server: remembers the raw
+  * data, derives default column names (column1..N), captures the first row
+  * (used when toggling isFirstRowHeader) and pushes header/rows/columns
+  * onto the controller.
+  */
+ previewTable: function (data) {
+ console.log('inside previewTable. data : ', data);
+ var self = this;
+ // fallback names used when the first row is NOT a header row
+ var defaultColumnNames = data.header.map(function(item,index){
+ return { "name": self.COLUMN_NAME_PREFIX + (index + 1) }
+ });
+ this.set("defaultColumnNames",defaultColumnNames);
+ this.set("previewData", data);
+ this.set("header", this.get("previewData.header"));
+ this.set('isFirstRowHeader', this.get("previewData.isFirstRowHeader"));
+ this.set('tableName', this.get("previewData.tableName"));
+ if (data.isFirstRowHeader == true) {
+ // the header row doubles as the remembered first data row
+ this.set("firstRow", this.get("previewData.header"));
+ }else {
+ if(data.rows.length > 0){
+ this.set("firstRow", this.get("previewData.rows")[0].row);
+ }else{
+ this.set("firstRow", Ember.A());
+ }
+ }
+ this.set("rows", this.get("previewData.rows"));
+ this.controller.set('tableName', this.get("previewData.tableName"));
+ this.controller.set("rows", this.get("previewData.rows"));
+ this.controller.set("columns", this.transformToColumnModelList(this.get("previewData.header")));
+ },
+
+ transformToColumnModelList : function(columns){
+ var _this = this;
+ if(columns){
+ return columns.map(function(column){
+ return _this.transformToColumnModel(column);
+ });
+ }
+ return Ember.A();
+ },
+
+ transformToColumnModel: function (column) {
+ return Column.create({
+ name: column.name,
+ type: datatypes.findBy("label", column.type),
+ editing: true
+ })
+ },
+ onGeneratePreviewSuccess: function (data) {
+ console.log("onGeneratePreviewSuccess");
+ this.set("showPreview",true);
+ this.hideUploadModal();
+ this.previewTable(data);
+ },
+
+ onGeneratePreviewFailure: function (error) {
+ console.log("onGeneratePreviewFailure");
+ this.set("showPreview",false);
+ this.hideUploadModal();
+ this.setError(error);
+ },
+
+ createActualTable: function (tableData) {
+ console.log("createActualTable");
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.startingToCreateActualTable'));
+ var retValue = this.createTable(tableData.get("tableMeta"));
+ return retValue;
+ // var self = this;
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.startingToCreateActualTable'));
+ // var headers = this.get('header');
+ // var selectedDatabase = this.get('selectedDatabase');
+ // if (!selectedDatabase) {
+ // throw new Error(this.translate('hive.errors.emptyDatabase', {database : this.translate("hive.words.database")}));
+ // }
+ //
+ // this.set('databaseName', this.get('selectedDatabase.id'));
+ // var databaseName = this.get('databaseName');
+ // var tableName = this.get("tableMeta").name;
+ // var isFirstRowHeader = this.get('isFirstRowHeader');
+ // var filetype = this.get("selectedFileType");
+ //
+ // this.validateInput(headers,tableName,databaseName,isFirstRowHeader);
+ // this.showUploadModal();
+ // var rowFormat = this.getRowFormat();
+ // return this.getUploader().createTable({
+ // "isFirstRowHeader": isFirstRowHeader,
+ // "header": headers,
+ // "tableName": tableName,
+ // "databaseName": databaseName,
+ // "hiveFileType":filetype,
+ // "rowFormat": { "fieldsTerminatedBy" : rowFormat.fieldsTerminatedBy, "escapedBy" : rowFormat.escapedBy}
+ // });
+ },
+ getRowFormat : function(){
+ var fieldsTerminatedBy = this.getAsciiChar('fieldsTerminatedBy');
+ var escapedBy = this.getAsciiChar('escapedBy');
+ return {"fieldsTerminatedBy": fieldsTerminatedBy, "escapedBy" : escapedBy};
+ },
+ waitForCreateActualTable: function (jobId) {
+ console.log("waitForCreateActualTable");
+ this.popUploadProgressInfos();
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.waitingToCreateActualTable'));
+ var self = this;
+ var p = new Ember.RSVP.Promise(function (resolve, reject) {
+ self.waitForJobStatus(jobId, resolve, reject);
+ });
+
+ return p;
+ },
+ onCreateActualTableSuccess: function () {
+ console.log("onCreateTableSuccess");
+ this.popUploadProgressInfos();
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.successfullyCreatedActualTable'));
+ },
+ onCreateActualTableFailure: function (error) {
+ console.log("onCreateActualTableFailure");
+ this.popUploadProgressInfos();
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.failedToCreateActualTable'));
+ this.setError(error);
+ },
+ createTempTable: function (tableData) {
+ let tableMeta = JSON.parse(JSON.stringify(tableData.get("tableMeta")));
+ // manually copy the columns as they are missing members when copying
+ let columns = tableData.get("tableMeta").columns.map(function(col){
+ return col.copy();
+ });
+ tableMeta.columns = columns;
+
+ console.log("tableMeta : ", tableMeta);
+
+ var self = this;
+ console.log("createTempTable");
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.startingToCreateTemporaryTable'));
+ var tempTableName = this.generateTempTableName();
+ tableMeta.name = tempTableName;
+
+ var headers = tableMeta.columns.map(function(column){
+ if(tableData.fileFormatInfo.containsEndlines){
+ column.type.label = "STRING";
+ delete column.scale;
+ delete column.precision;
+ }
+ return column;
+ });
+
+ tableMeta.columns = headers;
+ tableMeta.settings = {};
+ tableMeta.properties = [];
+ tableMeta.settings.fileFormat = {};
+ tableMeta.settings.fileFormat.type = "TEXTFILE";
+ this.set("tableData.tempTableMeta", tableMeta);
+ return this.createTable(tableMeta);
+ // return this.getUploader().createTable({
+ // "isFirstRowHeader": this.get("isFirstRowHeader"),
+ // "header": headers,
+ // "tableName": tempTableName,
+ // "databaseName": this.get('databaseName'),
+ // "hiveFileType":"TEXTFILE",
+ // "rowFormat": { "fieldsTerminatedBy" : parseInt('1', 10), "escapedBy" : null}
+ // });
+ },
+
+ waitForCreateTempTable: function (jobId) {
+ console.log("waitForCreateTempTable");
+ this.popUploadProgressInfos();
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.waitingToCreateTemporaryTable'));
+ var self = this;
+ var p = new Ember.RSVP.Promise(function (resolve, reject) {
+ self.waitForJobStatus(jobId, resolve, reject);
+ });
+
+ return p;
+ },
+
+ onCreateTempTableSuccess: function () {
+ console.log("onCreateTempTableSuccess");
+ this.popUploadProgressInfos();
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.successfullyCreatedTemporaryTable'));
+ },
+
+ deleteTable : function(databaseName, tableName){
+ console.log("deleting table ", databaseName , "." , tableName);
+ return this.getUploader().deleteTable({
+ "database": databaseName,
+ "table": tableName
+ });
+ },
+
+ deleteTableOnError: function (databaseName, tableName, tableLabel) {
+ //delete table and wait for delete job
+ var self = this;
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.deletingTable',{table:tableLabel}));
+
+ return this.deleteTable(databaseName, tableName).then(function (job) {
+ return new Ember.RSVP.Promise(function (resolve, reject) {
+ self.waitForJobStatus(job.id, resolve, reject);
+ });
+ }).then(function () {
+ self.popUploadProgressInfos();
+ self.pushUploadProgressInfos(this.formatMessage('hive.messages.succesfullyDeletedTable',{table:tableLabel}));
+ return Ember.RSVP.Promise.resolve();
+ }, function (err) {
+ self.popUploadProgressInfos();
+ self.pushUploadProgressInfos(this.formatMessage('hive.messages.failedToDeleteTable',{table:tableLabel}));
+ self.setError(err);
+ return Ember.RSVP.Promise.reject();
+ });
+ },
+
+ rollBackActualTableCreation : function(){
+ return this.deleteTableOnError(this.get("database"),this.get("tableMeta").name,this.translate('hive.words.actual'));
+ },
+
+ translate : function(str,vars){
+ return this.get('i18n').t(str,vars);
+ },
+ formatMessage : function(messageId, vars){
+ return this.translate(messageId, vars);
+ },
+ onCreateTempTableFailure : function(error){
+ console.log("onCreateTempTableFailure");
+ this.setError(error);
+ this.popUploadProgressInfos();
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.failedToCreateTemporaryTable'));
+ return this.rollBackActualTableCreation().then(function(data){
+ return Ember.RSVP.Promise.reject(error); // always reject for the flow to stop
+ }, function (err) {
+ return Ember.RSVP.Promise.reject(error); // always reject for the flow to stop
+ });
+ },
+
+ uploadFile: function (tableData) {
+ console.log("uploadFile");
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.startingToUploadFile'));
+ let uploadSource = tableData.get("fileInfo").get("uploadSource");
+ if(uploadSource === "local"){
+ return this.uploadTable(tableData);
+ }else{
+ return this.uploadTableFromHdfs(tableData);
+ }
+ },
+
+ waitForUploadingFile: function (data) {
+ console.log("waitForUploadingFile");
+ this.popUploadProgressInfos();
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.waitingToUploadFile'));
+ if( data.jobId ){
+ var self = this;
+ var p = new Ember.RSVP.Promise(function (resolve, reject) {
+ self.waitForJobStatus(data.jobId, resolve, reject);
+ });
+ return p;
+ }else{
+ return Ember.RSVP.Promise.resolve(data);
+ }
+ },
+
+ onUploadingFileSuccess: function () {
+ console.log("onUploadingFileSuccess");
+ this.popUploadProgressInfos();
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.successfullyUploadedFile') );
+ },
+
+ rollBackTempTableCreation: function () {
+ var self = this;
+ return this.deleteTableOnError(this.get("database"),this.get("tempTableMeta").name,this.translate('hive.words.temporary')).then(function(data){
+ return self.rollBackActualTableCreation();
+ },function(err){
+ return self.rollBackActualTableCreation();
+ })
+ },
+
+ onUploadingFileFailure: function (error) {
+ console.log("onUploadingFileFailure");
+ this.setError(error);
+ this.popUploadProgressInfos();
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.failedToUploadFile'));
+ return this.rollBackTempTableCreation().then(function(data){
+ return Ember.RSVP.Promise.reject(error); // always reject for the flow to stop
+ },function(err){
+ return Ember.RSVP.Promise.reject(error); // always reject for the flow to stop
+ });
+ },
+
+ rollBackUploadFile : function(){
+ return this.rollBackTempTableCreation();
+ },
+
+ insertIntoTable : function(tableData){
+ console.log("insertIntoTable");
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.startingToInsertRows'));
+
+ let headers = tableData.get("tableMeta").columns.map(function(column){
+ var header = JSON.parse(JSON.stringify(column));
+ header.type = column.type.label;
+ return header;
+ });
+
+ return this.getUploader().insertIntoTable({
+ "fromDatabase": tableData.get("database"),
+ "fromTable": tableData.get("tempTableMeta").name,
+ "toDatabase": tableData.get("database"),
+ "toTable": tableData.get("tableMeta").name,
+ "header": headers,
+ "unhexInsert": tableData.fileFormatInfo.containsEndlines
+ });
+ },
+
+ waitForInsertIntoTable: function (jobId) {
+ console.log("waitForInsertIntoTable");
+ this.popUploadProgressInfos();
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.waitingToInsertRows'));
+ var self = this;
+ var p = new Ember.RSVP.Promise(function (resolve, reject) {
+ self.waitForJobStatus(jobId, resolve, reject);
+ });
+
+ return p;
+ },
+
+ onInsertIntoTableSuccess: function () {
+ console.log("onInsertIntoTableSuccess");
+ this.popUploadProgressInfos();
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.successfullyInsertedRows'));
+ },
+
+ onInsertIntoTableFailure: function (error) {
+ console.log("onInsertIntoTableFailure");
+ this.setError(error);
+ this.popUploadProgressInfos();
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.failedToInsertRows'));
+ return this.rollBackUploadFile().then(function(data){
+ return Ember.RSVP.Promise.reject(error); // always reject for the flow to stop
+ },function(err){
+ return Ember.RSVP.Promise.reject(error); // always reject for the flow to stop
+ });
+ },
+ deleteTempTable : function(tableData){
+ console.log("deleteTempTable");
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.startingToDeleteTemporaryTable'));
+
+ return this.deleteTable(
+ tableData.get("database"),
+ tableData.get("tempTableMeta").name
+ );
+ },
+ waitForDeleteTempTable: function (jobId) {
+ console.log("waitForDeleteTempTable");
+ this.popUploadProgressInfos();
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.waitingToDeleteTemporaryTable'));
+ var self = this;
+ var p = new Ember.RSVP.Promise(function (resolve, reject) {
+ self.waitForJobStatus(jobId, resolve, reject);
+ });
+
+ return p;
+ },
+ onDeleteTempTableSuccess: function () {
+ console.log("onDeleteTempTableSuccess");
+ this.popUploadProgressInfos();
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.successfullyDeletedTemporaryTable'));
+ this.onUploadSuccessfull();
+ },
+ onDeleteTempTableFailure: function (error) {
+ console.log("onDeleteTempTableFailure");
+ this.setError(error);
+ this.setError(this.formatMessage('hive.messages.manuallyDeleteTable',{databaseName:this.get('databaseName'), tableName: this.get("tempTableName")}));
+ },
+ validateHDFSPath: function (hdfsPath) {
+ if (null == hdfsPath || hdfsPath == "") throw new Error(this.translate('hive.errors.emptyHdfsPath'));
+ var hdfsRegex = new RegExp(this.get("HDFS_PATH_REGEX"), "g");
+ var mArr = hdfsPath.match(hdfsRegex);
+ if (mArr == null || mArr.length != 1) throw new Error(this.translate('hive.errors.illegalHdfPath', {"hdfsPath": hdfsPath} ));
+ },
+ /**
+  * Orchestrates the whole upload flow as one promise chain:
+  *   create actual table -> create temp (TEXTFILE) table -> upload file
+  *   into temp table -> insert temp rows into actual table -> drop temp.
+  * Each step waits for its async job; each failure handler logs, records
+  * the error (and rolls back already-created tables where applicable) and
+  * re-throws so the chain short-circuits to the final catch.
+  */
+ createTableAndUploadFile: function (tableData) {
+ let databaseModel = this.controllerFor('databases.database').get('model');
+ let database = databaseModel.get('name');
+ tableData.set("database", database);
+ this.set("tableData", tableData);
+
+ var self = this;
+ self.setError();
+ // step 1: create the actual (target) table
+ self.createActualTable(tableData)
+ .then(function(job){
+ console.log("1. received job : ", job);
+ return self.waitForCreateActualTable(job.id);
+ },function(error){
+ console.log("Error occurred: ", error);
+ self.onCreateActualTableFailure(error);
+ throw error;
+ })
+ // step 2: create the temporary TEXTFILE landing table
+ .then(function(data){
+ self.onCreateActualTableSuccess(data);
+ return self.createTempTable(tableData);
+ },function(error){
+ if(!self.get('error')){
+ console.log("Error occurred: ", error);
+ self.onCreateActualTableFailure(error);
+ }
+ throw error;
+ })
+ .then(function(job){
+ return self.waitForCreateTempTable(job.id);
+ },function(error){
+ if(!self.get('error')){
+ console.log("Error occurred: ", error);
+ return self.onCreateTempTableFailure(error);
+ }
+ throw error;
+ })
+ // step 3: upload the file into the temporary table
+ .then(function(data){
+ self.onCreateTempTableSuccess(data);
+ return self.uploadFile(tableData);
+ },function(error){
+ if(!self.get('error')){
+ console.log("Error occurred: ", error);
+ return self.onCreateTempTableFailure(error);
+ }
+ throw error;
+ }).then(function(data){
+ return self.waitForUploadingFile(data);
+ },function(error){
+ if(!self.get('error')){
+ console.log("Error occurred: ", error);
+ return self.onUploadingFileFailure(error);
+ }
+ throw error;
+ })
+ // step 4: copy rows from the temp table into the actual table
+ .then(function(data){
+ self.onUploadingFileSuccess(data);
+ return self.insertIntoTable(tableData);
+ },function(error){
+ if(!self.get('error')){
+ console.log("Error occurred: ", error);
+ return self.onUploadingFileFailure(error);
+ }
+ throw error;
+ })
+ .then(function(job){
+ return self.waitForInsertIntoTable(job.id);
+ },function(error){
+ if(!self.get('error')){
+ console.log("Error occurred: ", error);
+ return self.onInsertIntoTableFailure(error);
+ }
+ throw error;
+ })
+ // step 5: drop the temporary table
+ .then(function(data){
+ self.onInsertIntoTableSuccess(data);
+ return self.deleteTempTable(tableData);
+ },function(error){
+ if(!self.get('error')){
+ console.log("Error occurred: ", error);
+ return self.onInsertIntoTableFailure(error);
+ }
+ throw error;
+ })
+ .then(function(job){
+ return self.waitForDeleteTempTable(job.id);
+ },function(error){
+ if(!self.get('error')){
+ console.log("Error occurred: ", error);
+ self.onDeleteTempTableFailure(error);
+ }
+ throw error;
+ })
+ .then(function(data){
+ self.onDeleteTempTableSuccess(data);
+ },function(error){
+ if(!self.get('error')){
+ console.log("Error occurred: ", error);
+ self.onDeleteTempTableFailure(error);
+ }
+ throw error;
+ })
+ .catch(function(error){
+ console.log("inside catch : ", error);
+ })
+ .finally(function(){
+ console.log("finally hide the modal always");
+ self.hideUploadModal();
+ });
+ },
+ validateInput: function (headers,tableName,databaseName,isFirstRowHeader) {
+ // throw exception if invalid.
+ if(!headers || headers.length == 0) throw new Error(this.translate('hive.errors.emptyHeaders'));
+
+ var regex = new RegExp(this.get("COLUMN_NAME_REGEX"),"g");
+
+ headers.forEach(function(column,index){
+ if( !column ) throw new Error(this.translate('hive.errors.emptyColumnName'));
+ var matchArr = column.name.match(regex);
+ if(matchArr == null || matchArr.length != 1 ) throw new Error(this.translate('hive.errors.illegalColumnName',{ columnName : column.name, index : (index + 1)}));
+ },this);
+
+ if(!tableName) throw new Error(this.translate('hive.errors.emptyTableName', {tableNameField : this.translate('hive.ui.tableName')}));
+ var tableRegex = new RegExp(this.get("TABLE_NAME_REGEX"),"g");
+ var mArr = tableName.match(tableRegex);
+ if(mArr == null || mArr.length != 1 ) throw new Error(this.translate('hive.errors.illegalTableName', {tableNameField:this.translate('hive.ui.tableName'),tableName:tableName}) );
+
+ if(!databaseName) throw new Error(this.translate('hive.errors.emptyDatabase', {database:this.translate('hive.words.database')}));
+
+ if (null == isFirstRowHeader || typeof isFirstRowHeader === 'undefined') { //this can be true or false. so explicitly checking for null/ undefined.
+ throw new Error(this.translate('hive.errors.emptyIsFirstRow', {isFirstRowHeaderField:this.translate('hive.ui.isFirstRowHeader')}));
+ }
+ },
+ /**
+  * Records the given error (JSON-serialized) on the 'error' property, or
+  * clears the property when called with no argument / a falsy value.
+  */
+ setError: function (error) {
+   if (!error) {
+     this.set("error");
+     return;
+   }
+   console.log(" error : ", error);
+   this.set('error', JSON.stringify(error));
+   // this.get('notifyService').warn(error);
+   // TODO : add notifyService warn message.
+   console.log("TODO : add notifyService warn message.");
+ },
+ previewError: function (error) {
+ this.setError(error);
+ },
+ /**
+  * Streams a file already on HDFS into the temporary landing table.
+  * NOTE(review): the progress-message key 'uploadingFromHdfs' lacks the
+  * 'hive.messages.' prefix used by every other message in this file --
+  * verify the key actually exists in the translations.
+  */
+ uploadTableFromHdfs : function(tableData){
+ console.log("uploadTableFromHdfs called.");
+ // if(!(this.get("inputFileTypeCSV") == true && this.get("isFirstRowHeader") == false) ){
+ this.pushUploadProgressInfos(this.formatMessage('uploadingFromHdfs'));
+ // }
+ var csvParams = tableData.get("fileFormatInfo.csvParams");
+ // flatten the column models to plain {name, type} pairs for the server
+ let columns = tableData.get("tableMeta").columns.map(function(column){
+ return {"name": column.get("name"), "type": column.get("type.label")};
+ });
+ let header = columns; //JSON.stringify(columns);
+
+ return this.getUploader().uploadFromHDFS({
+ "isFirstRowHeader": tableData.get("fileFormatInfo.isFirstRowHeader"),
+ "databaseName": tableData.get("database"),
+ "tableName": tableData.get("tempTableMeta").name,
+ "inputFileType": tableData.get("fileFormatInfo.inputFileType").id,
+ "hdfsPath": tableData.get("fileInfo.hdfsPath"),
+ "header": header,
+ "containsEndlines": tableData.get("fileFormatInfo.containsEndlines"),
+ "csvDelimiter": csvParams.get("csvDelimiter").name,
+ "csvEscape": csvParams.get("csvEscape").name,
+ "csvQuote": csvParams.get("csvQuote").name
+ });
+ },
+ uploadTable: function (tableData) {
+ this.printValues();
+ var csvParams = tableData.get("fileFormatInfo.csvParams");
+ let columns = tableData.get("tableMeta").columns.map(function(column){
+ return {"name": column.get("name"), "type": column.get("type.label")};
+ });
+ let header = JSON.stringify(columns);
+ return this.getUploader().uploadFiles('upload', tableData.get("fileInfo.files"), {
+ "isFirstRowHeader": tableData.get("fileFormatInfo.isFirstRowHeader"),
+ "databaseName" : tableData.get("database"),
+ "tableName" : tableData.get("tempTableMeta").name,
+ "inputFileType" : tableData.get("fileFormatInfo.inputFileType").id,
+ "header": header,
+ "containsEndlines": tableData.get("fileFormatInfo.containsEndlines"),
+ "csvDelimiter": csvParams.get("csvDelimiter").name,
+ "csvEscape": csvParams.get("csvEscape").name,
+ "csvQuote": csvParams.get("csvQuote").name
+ });
+ },
+
+ onUploadSuccessfull: function (data) {
+ console.log("onUploadSuccessfull : ", data);
+ this._transitionToCreatedTable(this.get("tableData").get('database'), this.get("tableData").get('tableMeta').name);
+
+ // this.get('notifyService').success(this.translate('hive.messages.successfullyUploadedTableHeader'),
+ // this.translate('hive.messages.successfullyUploadedTableMessage' ,{tableName:this.get("tableData").get("tableMeta").name ,databaseName:this.get("tableData").get("database")}));
+ this.clearFields();
+ },
+
+ onUploadError: function (error) {
+ console.log("onUploadError : ", error);
+ this.setError(error);
+ },
+ showOrHide: function () {
+ if (this.get('show') == false) {
+ this.set("displayOption", "display:none");
+ this.set("showMoreOrLess", "Show More");
+ } else {
+ this.set("displayOption", "display:table-row");
+ this.set("showMoreOrLess", "Show Less");
+ }
+ },
+
+ displayOption: "display:none",
+ actions: {
+ toggleCSVFormat: function() {
+ console.log("inside toggleCSVFormat");
+ this.toggleProperty('showCSVFormatInput');
+ },
+ hideInputParamModal : function(){
+ Ember.$("#inputParamsModal").modal("hide");
+ },
+ showInputParamModal : function(){
+ if(this.get('inputFileTypeCSV')){
+ Ember.$("#inputParamsModal").modal("show");
+ }
+ },
+ hideRowFormatModal : function(){
+ Ember.$("#rowFormatModal").modal("hide");
+ },
+ showRowFormatModal : function(){
+ if(this.get('storedAsTextFile')) {
+ Ember.$("#rowFormatModal").modal("show");
+ }
+ },
+ toggleErrors: function () {
+ this.toggleProperty('showErrors');
+ },
+ // filesUploaded: function (files) {
+ // console.log("upload-table.js : uploaded new files : ", files);
+ // this.clearFields();
+ //
+ // this.set('files', files);
+ // var name = files[0].name;
+ // var i = name.indexOf(".");
+ // var tableName = name.substr(0, i);
+ // this.set('tableName', tableName);
+ // var self = this;
+ // return this.generatePreview(sourceObject)
+ // },
+ preview: function (previewObject) {
+ console.log("upload-table.js : uploaded new files : ", previewObject);
+ this.clearFields();
+
+ this.set('previewObject', previewObject);
+ // var name = previewObject.get("fileInfo").get("files")[0].name;
+ // var i = name.indexOf(".");
+ // var tableName = name.substr(0, i);
+ // this.set('tableName', tableName);
+ // var self = this;
+ return this.generatePreview(previewObject)
+ },
+ previewFromHdfs: function () {
+ return this.generatePreview();
+ },
+ uploadTable: function (tableData) {
+ console.log("tableData", tableData);
+ try {
+ this.createTableAndUploadFile(tableData);
+ } catch (e) {
+ console.log("exception occured : ", e);
+ this.setError(e);
+ this.hideUploadModal();
+ }
+ },
+ uploadFromHDFS: function () {
+ this.set("isLocalUpload", false);
+ }
+ }
+});
http://git-wip-us.apache.org/repos/asf/ambari/blob/53e6c8d4/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js b/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js
index 453eb12..8503715 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js
@@ -66,4 +66,7 @@ export default Ember.Service.extend({
this.get('store').adapterFor('job').fetchResult(jobId);
},
+ getJob: function (jobId) {
+ return this.get('store').findRecord('job', jobId, {reload: true})
+ }
});
http://git-wip-us.apache.org/repos/asf/ambari/blob/53e6c8d4/contrib/views/hive20/src/main/resources/ui/app/services/table-operations.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/services/table-operations.js b/contrib/views/hive20/src/main/resources/ui/app/services/table-operations.js
index 2a0aeed..e442a36 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/services/table-operations.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/services/table-operations.js
@@ -35,7 +35,7 @@ export default Ember.Service.extend({
detailedInfo: detailedInfo,
storageInfo: storageInfo
});
- return new Promise((resolve, reject) => {
+ return new Ember.RSVP.Promise((resolve, reject) => {
this.get('store').adapterFor('table').createTable(tableInfo).then((data) => {
this.get('store').pushPayload(data);
resolve(this.get('store').peekRecord('job', data.job.id));
@@ -46,7 +46,7 @@ export default Ember.Service.extend({
},
deleteTable(database, table) {
- return new Promise((resolve, reject) => {
+ return new Ember.RSVP.Promise((resolve, reject) => {
this.get('store').adapterFor('table').deleteTable(database, table).then((data) => {
this.get('store').pushPayload(data);
resolve(this.get('store').peekRecord('job', data.job.id));
http://git-wip-us.apache.org/repos/asf/ambari/blob/53e6c8d4/contrib/views/hive20/src/main/resources/ui/app/templates/components/csv-format-params.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/csv-format-params.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/csv-format-params.hbs
new file mode 100644
index 0000000..a7cb862
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/csv-format-params.hbs
@@ -0,0 +1,118 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements. See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership. The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="panel panel-info">
+ <div class="panel-heading">
+ <div class="panel-title">
+ <button class="btn btn-primary {{if showCSVFormatInput 'active'}}" {{action
+ "toggleCSVFormat"}}>
+ {{fa-icon (if showCSVFormatInput "minus" "plus")}}
+ </button>
+ Select File Format
+ </div>
+ </div>
+ {{#if showCSVFormatInput}}
+ <div class="panel-body rowformat-custom-row">
+ <div class="row">
+ <div class="col-md-6 form-horizontal">
+ <div class="form-group">
+ <label class="col-md-4 control-label">{{t 'hive.ui.fileSource.fileType'}}</label>
+ <div class="col-md-7">
+ {{#power-select
+ selected=fileFormatInfo.inputFileType
+ options=inputFileTypes
+ searchField="name"
+ searchPlaceholder=(t 'hive.ui.fileSource.selectFileType')
+ onchange=(action "inputFileTypeSelected") as |parameter|}}
+ {{parameter.name}}{{#if parameter.description}} - {{parameter.description}}{{/if}}
+ {{/power-select}}
+ </div>
+ <div class="col-md-1">
+ <a class="text-danger" {{action "clearInputFileType"}}>{{fa-icon "times" size="lg"}}</a>
+ </div>
+ </div>
+ </div>
+ </div>
+ {{#if inputFileTypeCSV}}
+
+ <div class="row">
+ <div class="col-md-6 form-horizontal">
+ <div class="form-group">
+ <label class="col-md-4 control-label">{{t 'hive.ui.csvFormatParams.columnDelimterField'}}</label>
+ <div class="col-md-7">
+ {{#power-select
+ selected=fileFormatInfo.csvParams.csvDelimiter
+ options=terminationChars
+ searchField="name"
+ searchPlaceholder=(t 'hive.ui.csvFormatParams.columnDelimiterTooltip')
+ onchange=(action "csvDelimiterSelected") as |parameter|}}
+ {{parameter.name}}{{#if parameter.description}} - {{parameter.description}}{{/if}}
+ {{/power-select}}
+ </div>
+ <div class="col-md-1">
+ <a class="text-danger" {{action "clearColumnDelimter"}}>{{fa-icon "times" size="lg"}}</a>
+ </div>
+ </div>
+ </div>
+ </div>
+ <div class="row">
+ <div class="col-md-6 form-horizontal">
+ <div class="form-group">
+ <label class="col-md-4 control-label">{{t 'hive.ui.csvFormatParams.escapeCharacterField'}}</label>
+ <div class="col-md-7">
+ {{#power-select
+ selected=fileFormatInfo.csvParams.csvEscape
+ options=terminationChars
+ searchField="name"
+ searchPlaceholder=(t 'hive.ui.csvFormatParams.escapeCharacterTooltip')
+ onchange=(action "csvEscapeSelected") as |parameter|}}
+ {{parameter.name}}{{#if parameter.description}} - {{parameter.description}}{{/if}}
+ {{/power-select}}
+ </div>
+ <div class="col-md-1">
+ <a class="text-danger" {{action "clearEscapeCharacter"}}>{{fa-icon "times" size="lg"}}</a>
+ </div>
+ </div>
+ </div>
+ </div>
+ <div class="row">
+ <div class="col-md-6 form-horizontal">
+ <div class="form-group">
+ <label class="col-md-4 control-label">{{t 'hive.ui.csvFormatParams.quoteCharacterField'}}</label>
+ <div class="col-md-7">
+ {{#power-select
+ selected=fileFormatInfo.csvParams.csvQuote
+ options=terminationChars
+ searchField="name"
+ searchPlaceholder=(t 'hive.ui.csvFormatParams.quoteCharacterTooltip')
+ onchange=(action "csvQuoteSelected") as |parameter|}}
+ {{parameter.name}}{{#if parameter.description}} - {{parameter.description}}{{/if}}
+ {{/power-select}}
+ </div>
+ <div class="col-md-1">
+ <a class="text-danger" {{action "clearCsvQuote"}}>{{fa-icon "times" size="lg"}}</a>
+ </div>
+ </div>
+ </div>
+ </div>
+ {{/if}}
+ </div>
+ {{/if}}
+</div>
+
+{{yield}}
http://git-wip-us.apache.org/repos/asf/ambari/blob/53e6c8d4/contrib/views/hive20/src/main/resources/ui/app/templates/components/radio-button.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/radio-button.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/radio-button.hbs
new file mode 100644
index 0000000..6ae472f
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/radio-button.hbs
@@ -0,0 +1,19 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements. See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership. The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+{{yield}}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/53e6c8d4/contrib/views/hive20/src/main/resources/ui/app/templates/components/simple-table.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/simple-table.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/simple-table.hbs
new file mode 100644
index 0000000..1a76cc6
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/simple-table.hbs
@@ -0,0 +1,42 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements. See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership. The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div>
+ <table class="table table-expandable no-border">
+ <thead>
+ <tr>
+ {{#each header as |column|}}
+ <th>
+ {{column.name}}
+ </th>
+ {{/each}}
+ </tr>
+ </thead>
+ <tbody>
+ {{#each rows as |row|}}
+ <tr>
+ {{#each row.row as |item|}}
+ <td>{{item}}</td>
+ {{/each}}
+ </tr>
+ {{/each}}
+ </tbody>
+ </table>
+</div>
+
+{{yield}}
http://git-wip-us.apache.org/repos/asf/ambari/blob/53e6c8d4/contrib/views/hive20/src/main/resources/ui/app/templates/components/upload-table-source.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/upload-table-source.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/upload-table-source.hbs
new file mode 100644
index 0000000..c8e57e7
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/upload-table-source.hbs
@@ -0,0 +1,112 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements. See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership. The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="panel panel-info">
+ <div class="panel-heading">
+ <div class="panel-title">
+ <button class="btn btn-primary {{if showFileSourceInput 'active'}}" {{action
+ "toggleFileSource"}}>
+ {{fa-icon (if showFileSourceInput "minus" "plus")}}
+ </button>
+ Select File Source
+ </div>
+ </div>
+ {{#if showFileSourceInput}}
+ <div class="panel-body rowformat-custom-row">
+ <div class="row">
+ <div class="col-md-12 form-horizontal">
+ <div class="form-group">
+ <label class="col-md-3 control-label">{{t 'hive.ui.fileSource.uploadFromHdfs'}}</label>
+ <div class="col-md-3">
+ {{radio-button value='hdfs' checked=fileInfo.uploadSource}}
+ </div>
+ <label class="col-md-3 control-label">{{t 'hive.ui.fileSource.uploadFromLocal'}}</label>
+ <div class="col-md-3">
+ {{radio-button value='local' checked=fileInfo.uploadSource}}
+ </div>
+ </div>
+ </div>
+ </div>
+
+ {{#if showHdfsLocationInput}}
+ <div class="panel-body">
+ <div class="row">
+ <div class="col-md-12 form-horizontal">
+ <div class="form-group">
+ <label class="col-md-3 control-label">{{t 'hive.ui.fileSource.enterHdfsPathLabel'}}</label>
+ <div class="col-md-6">
+ {{input type="text" class="form-control" value=fileInfo.hdfsPath}}
+ </div>
+ </div>
+ </div>
+
+ <!--
+ <button class="btn btn-success" {{action
+ "toggleDirectoryViewer"}}>{{t 'hive.ui.fileSource.selectHdfsLocation'}}</button>
+ -->
+ </div>
+ <!--
+ {{#if showDirectoryViewer}}
+ {{hdfs-viewer-modal
+ showSelectedPath=true
+ close="closeHdfsModal"
+ selected="hdfsPath"
+ }}
+ {{/if}}
+ -->
+ </div>
+ {{/if}}
+
+ {{#if showLocalLocationInput}}
+ <div class="panel-body">
+ <div class="row">
+ <div class="col-md-12 form-horizontal">
+ <div class="form-group">
+ <label class="col-md-3 control-label">{{t 'hive.ui.fileSource.selectLocalFileLabel'}}</label>
+ <div class="col-md-6">
+ {{#file-picker fileLoaded="onFileChanged" preview=false}}
+ <div class="text-center vert-align-middle">
+ {{fa-icon "cloud-upload" size="4"}}
+ <h4> Drag file to upload or click to browse</h4>
+ </div>
+ {{/file-picker}}
+ </div>
+ </div>
+ </div>
+
+ <!--
+ <button class="btn btn-success" {{action
+ "toggleDirectoryViewer"}}>{{t 'hive.ui.fileSource.selectHdfsLocation'}}</button>
+ -->
+ </div>
+ <!--
+ {{#if showDirectoryViewer}}
+ {{hdfs-viewer-modal
+ showSelectedPath=true
+ close="closeHdfsModal"
+ selected="hdfsPath"
+ }}
+ {{/if}}
+ -->
+ </div>
+ {{/if}}
+ </div>
+ {{/if}}
+</div>
+
+{{yield}}
http://git-wip-us.apache.org/repos/asf/ambari/blob/53e6c8d4/contrib/views/hive20/src/main/resources/ui/app/templates/components/upload-table.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/upload-table.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/upload-table.hbs
new file mode 100644
index 0000000..e4388f0
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/upload-table.hbs
@@ -0,0 +1,59 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements. See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership. The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div class="table-body">
+ {{csv-format-params fileFormatInfo=fileFormatInfo}}
+</div>
+
+<div class="table-body">
+ {{upload-table-source onFileChanged='onFileChanged' fileInfo=fileInfo}}
+</div>
+
+
+<div class="create-table-controls">
+ <button class="btn btn-success" {{action
+ "preview"}}>{{fa-icon "eye"}} Preview</button>
+</div>
+
+<div class="table-body">
+ <div class="panel panel-info">
+ <div class="panel-heading">
+ <div class="panel-title">
+ <button class="btn btn-primary {{if showPreview 'active'}}" {{action
+ "toggleShowPreview"}}>
+ {{fa-icon (if showPreview "minus" "plus")}}
+ </button>
+ Table Preview
+ </div>
+ </div>
+ {{#if showPreview}}
+ {{simple-table header=columns rows=rows }}
+ {{/if}}
+ </div>
+</div>
+
+
+<div class="col-md-12 table-info">
+ <div class="table-body">
+ {{create-table tabs=tabs
+ cancel="cancel"
+ create="createAndUpload" columns=columns tableName=tableName }}
+ </div>
+</div>
+
+{{yield}}
http://git-wip-us.apache.org/repos/asf/ambari/blob/53e6c8d4/contrib/views/hive20/src/main/resources/ui/app/templates/components/validated-text-field.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/validated-text-field.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/validated-text-field.hbs
new file mode 100644
index 0000000..7cf0fcf
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/validated-text-field.hbs
@@ -0,0 +1,23 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements. See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership. The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+{{!
+* see example in validated-text-field.js component file
+}}
+
+{{input class=inputClass value=inputValue title=message placeholder=placeholder}}
http://git-wip-us.apache.org/repos/asf/ambari/blob/53e6c8d4/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/new.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/new.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/new.hbs
index 39b7a9e..4f3b98a 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/new.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/new.hbs
@@ -32,7 +32,11 @@
{{/modal-dialog}}
{{/if}}
<div class="table-header row">
- <p class="text-uppercase">table<strong> > create table</strong></p>
+ <p class="text-uppercase">table<strong> > create table</strong>
+ <div class="pull-right">
+ {{#link-to "databases.database.tables.upload-table" }}<p class="text-uppercase">{{fa-icon "upload"}} upload table</p>{{/link-to}}
+ </div>
+ </p>
</div>
<div class="table-body">
{{create-table tabs=tabs
http://git-wip-us.apache.org/repos/asf/ambari/blob/53e6c8d4/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/upload-table.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/upload-table.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/upload-table.hbs
new file mode 100644
index 0000000..0091ede
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/upload-table.hbs
@@ -0,0 +1,45 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements. See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership. The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+<div class="col-md-12 table-info">
+
+<div class="table-body">
+ <div class="table-header row">
+ <p class="text-uppercase">table<strong> > upload table</strong></p>
+ </div>
+</div>
+
+<div class="col-md-12 table-info">
+{{#if showUploadTableModal}}
+{{#modal-dialog
+translucentOverlay=true
+container-class="modal-dialog modal-sm"}}
+<div class="modal-content">
+ <div class="modal-header text-danger">
+ <p class="modal-title">{{fa-icon "plus"}} Upload Table</p>
+ </div>
+ <div class="modal-body text-center text-primary">
+ <p>{{uploadTableMessage}}</p>
+ </div>
+</div><!-- /.modal-content -->
+{{/modal-dialog}}
+{{/if}}
+</div>
+
+{{upload-table tabs=tabs columns=columns rows=rows tableName=tableName tableMeta=tableMeta cancel="cancel" createAndUpload="uploadTable" preview="preview"}}
+</div>
+{{yield}}
http://git-wip-us.apache.org/repos/asf/ambari/blob/53e6c8d4/contrib/views/hive20/src/main/resources/ui/app/utils/constants.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/utils/constants.js b/contrib/views/hive20/src/main/resources/ui/app/utils/constants.js
new file mode 100644
index 0000000..5405773
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/utils/constants.js
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Object.create({
+ /**
+ * This should reflect the naming conventions across the application.
+ * Changing one value also means changing the filenames for the chain of files
+ * represented by that value (routes, controllers, models etc).
+ * This dependency goes both ways.
+ */
+ namingConventions: {
+ routes: {
+ },
+
+ subroutes: {
+ },
+
+ job: 'job'
+ },
+
+ services: {
+ alertMessages: 'alert-messages',
+ jobs: 'jobs',
+ },
+
+ jobReferrer: {
+ sample: 'SAMPLE',
+ explain: 'EXPLAIN',
+ visualExplain: 'VISUALEXPLAIN',
+ job: 'JOB',
+ user: 'USER',
+ internal: 'INTERNAL'
+ },
+
+ statuses: {
+ unknown: "UNKNOWN",
+ initialized: "INITIALIZED",
+ running: "RUNNING",
+ succeeded: "SUCCEEDED",
+ canceled: "CANCELED",
+ closed: "CLOSED",
+ error: "ERROR",
+ failed: 'FAILED',
+ killed: 'KILLED',
+ pending: "PENDING"
+ },
+});
http://git-wip-us.apache.org/repos/asf/ambari/blob/53e6c8d4/contrib/views/hive20/src/main/resources/ui/bower.json
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/bower.json b/contrib/views/hive20/src/main/resources/ui/bower.json
index 4eadee7..a4ce788 100644
--- a/contrib/views/hive20/src/main/resources/ui/bower.json
+++ b/contrib/views/hive20/src/main/resources/ui/bower.json
@@ -6,6 +6,7 @@
"ember-qunit-notifications": "0.1.0",
"font-awesome": "~4.5.0",
"codemirror": "~5.15.0",
- "bootstrap-treeview": "~1.2.0"
+ "bootstrap-treeview": "~1.2.0",
+ "blob": "*"
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/53e6c8d4/contrib/views/hive20/src/main/resources/ui/config/environment.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/config/environment.js b/contrib/views/hive20/src/main/resources/ui/config/environment.js
index 9efd491..98b3c8c 100644
--- a/contrib/views/hive20/src/main/resources/ui/config/environment.js
+++ b/contrib/views/hive20/src/main/resources/ui/config/environment.js
@@ -60,5 +60,9 @@ module.exports = function(environment) {
}
+ ENV.i18n = {
+ defaultLocale: 'en'
+ };
+
return ENV;
};
http://git-wip-us.apache.org/repos/asf/ambari/blob/53e6c8d4/contrib/views/hive20/src/main/resources/ui/package.json
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/package.json b/contrib/views/hive20/src/main/resources/ui/package.json
index ed7a299..a066bfd 100644
--- a/contrib/views/hive20/src/main/resources/ui/package.json
+++ b/contrib/views/hive20/src/main/resources/ui/package.json
@@ -30,6 +30,7 @@
"ember-cli-babel": "^5.1.6",
"ember-cli-daterangepicker": "0.3.0",
"ember-cli-dependency-checker": "^1.2.0",
+ "ember-cli-file-picker": "0.0.10",
"ember-cli-flash": "1.4.0",
"ember-cli-htmlbars": "^1.0.3",
"ember-cli-htmlbars-inline-precompile": "^0.3.1",
@@ -46,6 +47,7 @@
"ember-data": "^2.7.0",
"ember-export-application-global": "^1.0.5",
"ember-font-awesome": "2.2.0",
+ "ember-i18n": "4.5.0",
"ember-light-table": "1.8.0",
"ember-load-initializers": "^0.5.1",
"ember-modal-dialog": "0.9.0",
@@ -54,6 +56,7 @@
"ember-resolver": "^2.0.3",
"ember-responsive": "2.0.0",
"ember-sass-bootstrap": "0.1.2",
+ "ember-uploader": "1.2.3",
"loader.js": "^4.0.1"
},
"ember-addon": {
http://git-wip-us.apache.org/repos/asf/ambari/blob/53e6c8d4/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserCSVTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserCSVTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserCSVTest.java
index 8006e91..5939b03 100644
--- a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserCSVTest.java
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserCSVTest.java
@@ -21,6 +21,7 @@ package org.apache.ambari.view.hive20.resources.upload;
import org.apache.ambari.view.hive20.client.ColumnDescription;
import org.apache.ambari.view.hive20.client.ColumnDescriptionShort;
import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;
import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl;
import org.apache.ambari.view.hive20.resources.uploads.parsers.DataParser;
import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseOptions;
@@ -54,8 +55,8 @@ public class DataParserCSVTest {
Assert.assertNotNull(pd.getHeader());
Assert.assertEquals(2, pd.getPreviewRows().size()); // now it will not return the first row which is header
Assert.assertEquals(2, pd.getHeader().size());
- ColumnDescription[] cd = {new ColumnDescriptionImpl("1", ColumnDescriptionShort.DataTypes.INT.toString(), 0),
- new ColumnDescriptionImpl("a", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1)};
+ ColumnInfo[] cd = {new ColumnInfo("1", ColumnDescriptionShort.DataTypes.INT.toString()),
+ new ColumnInfo("a", ColumnDescriptionShort.DataTypes.CHAR.toString())};
Object cols2[] = new Object[2];
cols2[0] = "2";
@@ -102,15 +103,15 @@ public class DataParserCSVTest {
PreviewData pd = dp.parsePreview();
Assert.assertNotNull(pd.getHeader());
Assert.assertEquals(4, pd.getHeader().size());
- ColumnDescription[] cd = {
+ ColumnInfo[] cd = {
// as row 3 contains 2.2
- new ColumnDescriptionImpl("1", ColumnDescriptionShort.DataTypes.DOUBLE.toString(), 0),
+ new ColumnInfo("1", ColumnDescriptionShort.DataTypes.DOUBLE.toString()),
// as all are chars
- new ColumnDescriptionImpl("a", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1),
+ new ColumnInfo("a", ColumnDescriptionShort.DataTypes.CHAR.toString()),
// as row 4 contains abc
- new ColumnDescriptionImpl("10", ColumnDescriptionShort.DataTypes.STRING.toString(), 2),
+ new ColumnInfo("10", ColumnDescriptionShort.DataTypes.STRING.toString()),
// although row 1 contains k but it is in header and not counted in detecting datatype
- new ColumnDescriptionImpl("k", ColumnDescriptionShort.DataTypes.INT.toString(), 3)};
+ new ColumnInfo("k", ColumnDescriptionShort.DataTypes.INT.toString())};
Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
}
@@ -146,16 +147,16 @@ public class DataParserCSVTest {
PreviewData pd = dp.parsePreview();
Assert.assertNotNull(pd.getHeader());
Assert.assertEquals(4, pd.getHeader().size());
- ColumnDescription[] cd = {
+ ColumnInfo[] cd = {
// as row 3 contains 2.2
- new ColumnDescriptionImpl("1", ColumnDescriptionShort.DataTypes.DOUBLE.toString(), 0),
+ new ColumnInfo("1", ColumnDescriptionShort.DataTypes.DOUBLE.toString()),
// as all are chars
- new ColumnDescriptionImpl("a", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1),
+ new ColumnInfo("a", ColumnDescriptionShort.DataTypes.CHAR.toString()),
// some are int, char and some double .. nothing other than 'string' satisfies all the rows
- new ColumnDescriptionImpl("10", ColumnDescriptionShort.DataTypes.STRING.toString(), 2),
+ new ColumnInfo("10", ColumnDescriptionShort.DataTypes.STRING.toString()),
// although row 1 contains k but it is in header and not counted in detecting datatype
+ // but row 2 also has a char p which will be accounted for datatype detection
- new ColumnDescriptionImpl("k", ColumnDescriptionShort.DataTypes.CHAR.toString(), 3)};
+ new ColumnInfo("k", ColumnDescriptionShort.DataTypes.CHAR.toString())};
Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
}
@@ -184,8 +185,8 @@ public class DataParserCSVTest {
Assert.assertNotNull(pd.getHeader());
Assert.assertEquals(1, pd.getPreviewRows().size());
Assert.assertEquals(2, pd.getHeader().size());
- ColumnDescription[] cd = {new ColumnDescriptionImpl("column1", ColumnDescriptionShort.DataTypes.INT.toString(), 0),
- new ColumnDescriptionImpl("column2", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1)};
+ ColumnInfo[] cd = {new ColumnInfo("column1", ColumnDescriptionShort.DataTypes.INT.toString()),
+ new ColumnInfo("column2", ColumnDescriptionShort.DataTypes.CHAR.toString())};
Object cols1[] = new Object[2];
cols1[0] = "1";
http://git-wip-us.apache.org/repos/asf/ambari/blob/53e6c8d4/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserJSONTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserJSONTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserJSONTest.java
index a15e5d4..2c7e5e8 100644
--- a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserJSONTest.java
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserJSONTest.java
@@ -21,6 +21,7 @@ package org.apache.ambari.view.hive20.resources.upload;
import org.apache.ambari.view.hive20.client.ColumnDescription;
import org.apache.ambari.view.hive20.client.ColumnDescriptionShort;
import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;
import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl;
import org.apache.ambari.view.hive20.resources.uploads.parsers.DataParser;
import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseOptions;
@@ -71,20 +72,20 @@ public class DataParserJSONTest {
Assert.assertNotNull(pd.getHeader());
Assert.assertEquals(7, pd.getPreviewRows().size()); // header row + preview rows
Assert.assertEquals(14, pd.getHeader().size());
- ColumnDescription[] cd = {new ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.CHAR.toString(), 0),
- new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1),
- new ColumnDescriptionImpl("col3", ColumnDescriptionShort.DataTypes.STRING.toString(), 2),
- new ColumnDescriptionImpl("col4", ColumnDescriptionShort.DataTypes.STRING.toString(), 3),
- new ColumnDescriptionImpl("col5", ColumnDescriptionShort.DataTypes.STRING.toString(), 4),
- new ColumnDescriptionImpl("col6", ColumnDescriptionShort.DataTypes.STRING.toString(), 5),
- new ColumnDescriptionImpl("col7", ColumnDescriptionShort.DataTypes.STRING.toString(), 6),
- new ColumnDescriptionImpl("col8", ColumnDescriptionShort.DataTypes.STRING.toString(), 7),
- new ColumnDescriptionImpl("col9", ColumnDescriptionShort.DataTypes.STRING.toString(), 8),
- new ColumnDescriptionImpl("col10", ColumnDescriptionShort.DataTypes.STRING.toString(), 9),
- new ColumnDescriptionImpl("col11", ColumnDescriptionShort.DataTypes.STRING.toString(), 10),
- new ColumnDescriptionImpl("col12", ColumnDescriptionShort.DataTypes.STRING.toString(), 11),
- new ColumnDescriptionImpl("col13", ColumnDescriptionShort.DataTypes.STRING.toString(), 12),
- new ColumnDescriptionImpl("col14", ColumnDescriptionShort.DataTypes.DOUBLE.toString(), 13)};
+ ColumnInfo[] cd = {new ColumnInfo("col1", ColumnDescriptionShort.DataTypes.CHAR.toString()),
+ new ColumnInfo("col2", ColumnDescriptionShort.DataTypes.STRING.toString()),
+ new ColumnInfo("col3", ColumnDescriptionShort.DataTypes.STRING.toString()),
+ new ColumnInfo("col4", ColumnDescriptionShort.DataTypes.STRING.toString()),
+ new ColumnInfo("col5", ColumnDescriptionShort.DataTypes.STRING.toString()),
+ new ColumnInfo("col6", ColumnDescriptionShort.DataTypes.STRING.toString()),
+ new ColumnInfo("col7", ColumnDescriptionShort.DataTypes.STRING.toString()),
+ new ColumnInfo("col8", ColumnDescriptionShort.DataTypes.STRING.toString()),
+ new ColumnInfo("col9", ColumnDescriptionShort.DataTypes.STRING.toString()),
+ new ColumnInfo("col10", ColumnDescriptionShort.DataTypes.STRING.toString()),
+ new ColumnInfo("col11", ColumnDescriptionShort.DataTypes.STRING.toString()),
+ new ColumnInfo("col12", ColumnDescriptionShort.DataTypes.STRING.toString()),
+ new ColumnInfo("col13", ColumnDescriptionShort.DataTypes.STRING.toString()),
+ new ColumnInfo("col14", ColumnDescriptionShort.DataTypes.DOUBLE.toString())};
Row row2 = new Row(new Object[]{"a", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "4.4"});
Row row3 = new Row(new Object[]{"b", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "5.4"});
@@ -207,8 +208,8 @@ public class DataParserJSONTest {
Assert.assertNotNull(pd.getHeader());
Assert.assertEquals(1, pd.getPreviewRows().size());
Assert.assertEquals(2, pd.getHeader().size());
- ColumnDescription[] cd = {new ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.CHAR.toString(), 0),
- new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1)};
+ ColumnInfo[] cd = {new ColumnInfo("col1", ColumnDescriptionShort.DataTypes.CHAR.toString()),
+ new ColumnInfo("col2", ColumnDescriptionShort.DataTypes.STRING.toString())};
Object cols1[] = new Object[2];
cols1[0] = "d";
@@ -246,8 +247,8 @@ public class DataParserJSONTest {
Assert.assertNotNull(pd.getHeader());
Assert.assertEquals(1, pd.getPreviewRows().size());
Assert.assertEquals(2, pd.getHeader().size());
- ColumnDescription[] cd = {new ColumnDescriptionImpl("column1", ColumnDescriptionShort.DataTypes.CHAR.toString(), 0),
- new ColumnDescriptionImpl("column2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1)};
+ ColumnInfo[] cd = {new ColumnInfo("column1", ColumnDescriptionShort.DataTypes.CHAR.toString()),
+ new ColumnInfo("column2", ColumnDescriptionShort.DataTypes.STRING.toString())};
Object cols1[] = new Object[2];
cols1[0] = "d";
http://git-wip-us.apache.org/repos/asf/ambari/blob/53e6c8d4/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserXMLTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserXMLTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserXMLTest.java
index 07e7c7c..bcdcfc0 100644
--- a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserXMLTest.java
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserXMLTest.java
@@ -21,7 +21,7 @@ package org.apache.ambari.view.hive20.resources.upload;
import org.apache.ambari.view.hive20.client.ColumnDescription;
import org.apache.ambari.view.hive20.client.ColumnDescriptionShort;
import org.apache.ambari.view.hive20.client.Row;
-import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;
import org.apache.ambari.view.hive20.resources.uploads.parsers.DataParser;
import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseOptions;
import org.apache.ambari.view.hive20.resources.uploads.parsers.PreviewData;
@@ -68,11 +68,11 @@ public class DataParserXMLTest {
Assert.assertNotNull(pd.getHeader());
Assert.assertEquals(2, pd.getPreviewRows().size()); // header row + preview rows
Assert.assertEquals(5, pd.getHeader().size());
- ColumnDescription[] cd = {new ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.STRING.toString(), 0),
- new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1),
- new ColumnDescriptionImpl("col3", ColumnDescriptionShort.DataTypes.STRING.toString(), 2),
- new ColumnDescriptionImpl("col4", ColumnDescriptionShort.DataTypes.INT.toString(), 3),
- new ColumnDescriptionImpl("col5", ColumnDescriptionShort.DataTypes.INT.toString(), 4)
+ ColumnInfo[] cd = {new ColumnInfo("col1", ColumnDescriptionShort.DataTypes.STRING.toString()),
+ new ColumnInfo("col2", ColumnDescriptionShort.DataTypes.STRING.toString()),
+ new ColumnInfo("col3", ColumnDescriptionShort.DataTypes.STRING.toString()),
+ new ColumnInfo("col4", ColumnDescriptionShort.DataTypes.INT.toString()),
+ new ColumnInfo("col5", ColumnDescriptionShort.DataTypes.INT.toString())
};
Row row2 = new Row(new Object[]{"row1-col1-Value", "row1-col2-Value", "row1-col3-Value", "10", "11"});
@@ -236,8 +236,8 @@ public class DataParserXMLTest {
Assert.assertNotNull(pd.getHeader());
Assert.assertEquals(1, pd.getPreviewRows().size());
Assert.assertEquals(2, pd.getHeader().size());
- ColumnDescription[] cd = {new ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.STRING.toString(), 0),
- new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.INT.toString(), 1)};
+ ColumnInfo[] cd = {new ColumnInfo("col1", ColumnDescriptionShort.DataTypes.STRING.toString()),
+ new ColumnInfo("col2", ColumnDescriptionShort.DataTypes.INT.toString())};
Object cols1[] = new Object[2];
cols1[0] = "row1-col1-Value";
@@ -278,8 +278,8 @@ public class DataParserXMLTest {
Assert.assertNotNull(pd.getHeader());
Assert.assertEquals(1, pd.getPreviewRows().size());
Assert.assertEquals(2, pd.getHeader().size());
- ColumnDescription[] cd = {new ColumnDescriptionImpl("column1", ColumnDescriptionShort.DataTypes.STRING.toString(), 0),
- new ColumnDescriptionImpl("column2", ColumnDescriptionShort.DataTypes.INT.toString(), 1)};
+ ColumnInfo[] cd = {new ColumnInfo("column1", ColumnDescriptionShort.DataTypes.STRING.toString()),
+ new ColumnInfo("column2", ColumnDescriptionShort.DataTypes.INT.toString())};
Object cols1[] = new Object[2];
cols1[0] = "row1-col1-Value";
http://git-wip-us.apache.org/repos/asf/ambari/blob/53e6c8d4/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/QueryGeneratorTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/QueryGeneratorTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/QueryGeneratorTest.java
deleted file mode 100644
index 1a0d34a..0000000
--- a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/QueryGeneratorTest.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive20.resources.upload;
-
-import org.apache.ambari.view.hive.resources.uploads.query.RowFormat;
-import org.apache.ambari.view.hive20.client.ColumnDescription;
-import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl;
-import org.apache.ambari.view.hive20.resources.uploads.HiveFileType;
-import org.apache.ambari.view.hive20.resources.uploads.query.DeleteQueryInput;
-import org.apache.ambari.view.hive20.resources.uploads.query.InsertFromQueryInput;
-import org.apache.ambari.view.hive20.resources.uploads.query.QueryGenerator;
-import org.apache.ambari.view.hive20.resources.uploads.query.TableInfo;
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.util.ArrayList;
-import java.util.List;
-
-public class QueryGeneratorTest {
- @Test
- public void testCreateTextFile() {
-
- List<ColumnDescriptionImpl> cdl = new ArrayList<>(4);
- cdl.add(new ColumnDescriptionImpl("col1", ColumnDescription.DataTypes.CHAR.toString(), 0, 10));
- cdl.add(new ColumnDescriptionImpl("col2", ColumnDescription.DataTypes.STRING.toString(), 1));
- cdl.add(new ColumnDescriptionImpl("col3", ColumnDescription.DataTypes.DECIMAL.toString(), 2, 10, 5));
- cdl.add(new ColumnDescriptionImpl("col4", ColumnDescription.DataTypes.VARCHAR.toString(), 3, 40));
- cdl.add(new ColumnDescriptionImpl("col5", ColumnDescription.DataTypes.INT.toString(), 4));
-
- TableInfo ti = new TableInfo("databaseName", "tableName", cdl, HiveFileType.TEXTFILE, new RowFormat(',', '\\'));
-
- QueryGenerator qg = new QueryGenerator();
- Assert.assertEquals("Create query for text file not correct ","CREATE TABLE tableName (col1 CHAR(10), col2 STRING," +
- " col3 DECIMAL(10,5), col4 VARCHAR(40), col5 INT) ROW FORMAT DELIMITED FIELDS TERMINATED BY ','" +
- " ESCAPED BY '\\\\' STORED AS TEXTFILE;",qg.generateCreateQuery(ti));
- }
-
- @Test
- public void testCreateORC() {
-
- List<ColumnDescriptionImpl> cdl = new ArrayList<>(4);
- cdl.add(new ColumnDescriptionImpl("col1", ColumnDescription.DataTypes.CHAR.toString(), 0, 10));
- cdl.add(new ColumnDescriptionImpl("col2", ColumnDescription.DataTypes.STRING.toString(), 1));
- cdl.add(new ColumnDescriptionImpl("col3", ColumnDescription.DataTypes.DECIMAL.toString(), 2, 10, 5));
- cdl.add(new ColumnDescriptionImpl("col4", ColumnDescription.DataTypes.VARCHAR.toString(), 3, 40));
- cdl.add(new ColumnDescriptionImpl("col5", ColumnDescription.DataTypes.INT.toString(), 4));
-
- TableInfo ti = new TableInfo("databaseName", "tableName", cdl, HiveFileType.ORC, new RowFormat(',', '\\'));
-
- QueryGenerator qg = new QueryGenerator();
- Assert.assertEquals("Create query for text file not correct ","CREATE TABLE tableName (col1 CHAR(10), col2 STRING, col3 DECIMAL(10,5), col4 VARCHAR(40), col5 INT) STORED AS ORC;",qg.generateCreateQuery(ti));
- }
-
- @Test
- public void testInsertWithoutUnhexFromQuery() {
- List<ColumnDescriptionImpl> cdl = new ArrayList<>(4);
- cdl.add(new ColumnDescriptionImpl("col1", ColumnDescription.DataTypes.CHAR.toString(), 0, 10));
- cdl.add(new ColumnDescriptionImpl("col2", ColumnDescription.DataTypes.STRING.toString(), 1));
- cdl.add(new ColumnDescriptionImpl("col3", ColumnDescription.DataTypes.DECIMAL.toString(), 2, 10, 5));
- cdl.add(new ColumnDescriptionImpl("col4", ColumnDescription.DataTypes.VARCHAR.toString(), 3, 40));
- cdl.add(new ColumnDescriptionImpl("col5", ColumnDescription.DataTypes.INT.toString(), 4));
-
- InsertFromQueryInput ifqi = new InsertFromQueryInput("fromDB","fromTable","toDB","toTable", cdl, Boolean.FALSE);
-
- QueryGenerator qg = new QueryGenerator();
- Assert.assertEquals("insert from one table to another not correct ","INSERT INTO TABLE toDB.toTable SELECT col1, col2, col3, col4, col5 FROM fromDB.fromTable;",qg.generateInsertFromQuery(ifqi));
- }
-
- @Test
- public void testInsertWithUnhexFromQuery() {
- List<ColumnDescriptionImpl> cdl = new ArrayList<>(4);
- cdl.add(new ColumnDescriptionImpl("col1", ColumnDescription.DataTypes.CHAR.toString(), 0, 10));
- cdl.add(new ColumnDescriptionImpl("col2", ColumnDescription.DataTypes.STRING.toString(), 1));
- cdl.add(new ColumnDescriptionImpl("col3", ColumnDescription.DataTypes.DECIMAL.toString(), 2, 10, 5));
- cdl.add(new ColumnDescriptionImpl("col4", ColumnDescription.DataTypes.VARCHAR.toString(), 3, 40));
- cdl.add(new ColumnDescriptionImpl("col5", ColumnDescription.DataTypes.INT.toString(), 4));
-
- InsertFromQueryInput ifqi = new InsertFromQueryInput("fromDB","fromTable","toDB","toTable", cdl, Boolean.TRUE);
-
- QueryGenerator qg = new QueryGenerator();
- Assert.assertEquals("insert from one table to another not correct ","INSERT INTO TABLE toDB.toTable SELECT UNHEX(col1), UNHEX(col2), col3, UNHEX(col4), col5 FROM fromDB.fromTable;",qg.generateInsertFromQuery(ifqi));
- }
-
- @Test
- public void testDropTableQuery() {
-
- DeleteQueryInput deleteQueryInput = new DeleteQueryInput("dbName","tableName");
-
- QueryGenerator qg = new QueryGenerator();
- Assert.assertEquals("drop table query not correct ","DROP TABLE dbName.tableName;",qg.generateDropTableQuery(deleteQueryInput ));
- }
-}
http://git-wip-us.apache.org/repos/asf/ambari/blob/53e6c8d4/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/TableDataReaderTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/TableDataReaderTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/TableDataReaderTest.java
index 340a921..325aed8 100644
--- a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/TableDataReaderTest.java
+++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/TableDataReaderTest.java
@@ -20,6 +20,7 @@ package org.apache.ambari.view.hive20.resources.upload;
import org.apache.ambari.view.hive20.client.ColumnDescription;
import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;
import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl;
import org.apache.ambari.view.hive20.resources.uploads.TableDataReader;
import org.junit.Assert;
@@ -78,9 +79,9 @@ public class TableDataReaderTest {
@Test
public void testCSVReader() throws IOException {
RowIter rowIter = new RowIter(10,10);
- List<ColumnDescriptionImpl> colDescs = new LinkedList<>();
+ List<ColumnInfo> colDescs = new LinkedList<>();
for(int i = 0 ; i < 10 ; i++ ) {
- ColumnDescriptionImpl cd = new ColumnDescriptionImpl("col" + (i+1) , ColumnDescription.DataTypes.STRING.toString(), i);
+ ColumnInfo cd = new ColumnInfo("col" + (i+1) , ColumnDescription.DataTypes.STRING.toString());
colDescs.add(cd);
}