You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by pa...@apache.org on 2016/06/30 09:35:29 UTC
[2/2] ambari git commit: AMBARI-17480. Hive2 view : port changes of
following 10 bugs in hive2 view (Nitiraj Rathore via pallavkul)
AMBARI-17480. Hive2 view : port changes of following 10 bugs in hive2 view (Nitiraj Rathore via pallavkul)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ab362d06
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ab362d06
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ab362d06
Branch: refs/heads/trunk
Commit: ab362d0680532e2be430bde22a852292fae47a35
Parents: 45338f5
Author: Pallav Kulshreshtha <pa...@gmail.com>
Authored: Thu Jun 30 15:04:54 2016 +0530
Committer: Pallav Kulshreshtha <pa...@gmail.com>
Committed: Thu Jun 30 15:04:54 2016 +0530
----------------------------------------------------------------------
.../hive2/persistence/DataStoreStorage.java | 40 +--
.../view/hive2/resources/jobs/Aggregator.java | 6 +-
.../view/hive2/resources/jobs/JobService.java | 3 +
.../resources/uploads/UploadFromHdfsInput.java | 16 +-
.../hive2/resources/uploads/UploadService.java | 166 ++++++----
.../resources/uploads/parsers/DataParser.java | 5 +-
.../resources/uploads/parsers/IParser.java | 6 +-
.../resources/uploads/parsers/ParseUtils.java | 76 ++++-
.../hive2/resources/uploads/parsers/Parser.java | 78 +++--
.../uploads/parsers/csv/CSVParser.java | 7 +-
.../uploads/parsers/json/JSONParser.java | 2 +-
.../uploads/parsers/xml/XMLParser.java | 2 +-
.../ui/hive-web/app/adapters/file-upload.js | 1 -
.../ui/hive-web/app/adapters/upload-table.js | 6 +-
.../ui/hive-web/app/components/file-upload.js | 11 +-
.../app/components/validated-text-field.js | 60 ++++
.../ui/hive-web/app/controllers/index.js | 10 +-
.../ui/hive-web/app/controllers/upload-table.js | 253 +++++++-------
.../ui/hive-web/app/initializers/i18n.js | 62 +++-
.../resources/ui/hive-web/app/routes/history.js | 9 +-
.../resources/ui/hive-web/app/services/job.js | 8 +-
.../resources/ui/hive-web/app/styles/app.scss | 4 +
.../components/validated-text-field.hbs | 23 ++
.../ui/hive-web/app/templates/upload-table.hbs | 56 ++--
.../hive2/resources/upload/CSVParserTest.java | 109 +++++++
.../resources/upload/DataParserCSVTest.java | 326 +++++++++++++++++++
.../resources/upload/DataParserJSONTest.java | 263 +++++++++++++++
.../resources/upload/DataParserXMLTest.java | 295 +++++++++++++++++
.../hive2/resources/upload/JsonParserTest.java | 146 +++++++++
.../resources/upload/QueryGeneratorTest.java | 84 +++++
.../resources/upload/TableDataReaderTest.java | 127 ++++++++
.../hive2/resources/upload/XMLParserTest.java | 135 ++++++++
32 files changed, 2068 insertions(+), 327 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab362d06/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/persistence/DataStoreStorage.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/persistence/DataStoreStorage.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/persistence/DataStoreStorage.java
index 9adf129..a54a82e 100644
--- a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/persistence/DataStoreStorage.java
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/persistence/DataStoreStorage.java
@@ -54,36 +54,17 @@ public class DataStoreStorage implements Storage {
@Override
public synchronized void store(Class model, Indexed obj) {
- assignId(model, obj);
-
- Indexed newBean;
- try {
- newBean = (Indexed) BeanUtils.cloneBean(obj);
- } catch (IllegalAccessException e) {
- throw new ServiceFormattedException("S010 Data storage error", e);
- } catch (InstantiationException e) {
- throw new ServiceFormattedException("S010 Data storage error", e);
- } catch (InvocationTargetException e) {
- throw new ServiceFormattedException("S010 Data storage error", e);
- } catch (NoSuchMethodException e) {
- throw new ServiceFormattedException("S010 Data storage error", e);
- }
- preprocessEntity(newBean);
try {
+ Indexed newBean = (Indexed) BeanUtils.cloneBean(obj);
+ preprocessEntity(newBean);
context.getDataStore().store(newBean);
- } catch (PersistenceException e) {
+ obj.setId(newBean.getId());
+ } catch (Exception e) {
throw new ServiceFormattedException("S020 Data storage error", e);
}
}
- public void assignId(Class model, Indexed obj) {
- if (obj.getId() == null) {
- String id = nextIdForEntity(context, model);
- obj.setId(id);
- }
- }
-
private void preprocessEntity(Indexed obj) {
cleanTransientFields(obj);
}
@@ -103,19 +84,6 @@ public class DataStoreStorage implements Storage {
}
}
- private static synchronized String nextIdForEntity(ViewContext context, Class aClass) {
- // auto increment id implementation
- String lastId = context.getInstanceData(aClass.getName());
- int newId;
- if (lastId == null) {
- newId = 1;
- } else {
- newId = Integer.parseInt(lastId) + 1;
- }
- context.putInstanceData(aClass.getName(), String.valueOf(newId));
- return String.valueOf(newId);
- }
-
@Override
public synchronized <T extends Indexed> T load(Class<T> model, Object id) throws ItemNotFound {
LOG.debug(String.format("Loading %s #%s", model.getName(), id));
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab362d06/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/Aggregator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/Aggregator.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/Aggregator.java
index 4293b1b..f184150 100644
--- a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/Aggregator.java
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/Aggregator.java
@@ -73,9 +73,9 @@ public class Aggregator {
public List<Job> readAll(String username) {
Set<String> addedOperationIds = new HashSet<>();
- List<Job> allJobs = new LinkedList<>();
- for (HiveQueryId atsHiveQuery : ats.getHiveQueryIdsList(username)) {
-
+ List<Job> allJobs = new LinkedList<Job>();
+ List<HiveQueryId> queries = ats.getHiveQueryIdsList(username);
+ for (HiveQueryId atsHiveQuery : queries) {
TezDagId atsTezDag = getTezDagFromHiveQueryId(atsHiveQuery);
JobImpl atsJob;
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab362d06/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/JobService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/JobService.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/JobService.java
index 975036e..3bc396d 100644
--- a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/JobService.java
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/JobService.java
@@ -500,10 +500,13 @@ public class JobService extends BaseService {
return Response.ok(jobObject).status(201).build();
} catch (WebApplicationException ex) {
+ LOG.error("Error occurred while creating job : ",ex);
throw ex;
} catch (ItemNotFound itemNotFound) {
+ LOG.error("Error occurred while creating job : ",itemNotFound);
throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
} catch (Throwable ex) {
+ LOG.error("Error occurred while creating job : ",ex);
throw new ServiceFormattedException(ex.getMessage(), ex);
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab362d06/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/UploadFromHdfsInput.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/UploadFromHdfsInput.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/UploadFromHdfsInput.java
index 7a7f77c..8214579 100644
--- a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/UploadFromHdfsInput.java
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/UploadFromHdfsInput.java
@@ -21,7 +21,7 @@ package org.apache.ambari.view.hive2.resources.uploads;
import java.io.Serializable;
public class UploadFromHdfsInput implements Serializable{
- private Boolean isFirstRowHeader;
+ private Boolean isFirstRowHeader = Boolean.FALSE;
private String inputFileType;
private String hdfsPath;
private String tableName;
@@ -80,12 +80,12 @@ public class UploadFromHdfsInput implements Serializable{
@Override
public String toString() {
- return "UploadFromHdfsInput{" +
- "isFirstRowHeader=" + isFirstRowHeader +
- ", inputFileType='" + inputFileType + '\'' +
- ", hdfsPath='" + hdfsPath + '\'' +
- ", tableName='" + tableName + '\'' +
- ", databaseName='" + databaseName + '\'' +
- '}';
+ return new StringBuilder("UploadFromHdfsInput{" )
+ .append("isFirstRowHeader=").append( isFirstRowHeader )
+ .append(", inputFileType='" ).append(inputFileType)
+ .append(", hdfsPath='").append(hdfsPath)
+ .append(", tableName='").append( tableName )
+ .append(", databaseName='").append(databaseName )
+ .append('}').toString();
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab362d06/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/UploadService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/UploadService.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/UploadService.java
index 59e969d..e0bc083 100644
--- a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/UploadService.java
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/UploadService.java
@@ -21,7 +21,6 @@ package org.apache.ambari.view.hive2.resources.uploads;
import com.sun.jersey.core.header.FormDataContentDisposition;
import com.sun.jersey.multipart.FormDataParam;
import org.apache.ambari.view.hive2.BaseService;
-import org.apache.ambari.view.hive2.persistence.utils.ItemNotFound;
import org.apache.ambari.view.hive2.resources.jobs.viewJobs.Job;
import org.apache.ambari.view.hive2.resources.jobs.viewJobs.JobController;
import org.apache.ambari.view.hive2.resources.jobs.viewJobs.JobImpl;
@@ -29,7 +28,11 @@ import org.apache.ambari.view.hive2.resources.jobs.viewJobs.JobResourceManager;
import org.apache.ambari.view.hive2.resources.uploads.parsers.DataParser;
import org.apache.ambari.view.hive2.resources.uploads.parsers.ParseOptions;
import org.apache.ambari.view.hive2.resources.uploads.parsers.PreviewData;
-import org.apache.ambari.view.hive2.resources.uploads.query.*;
+import org.apache.ambari.view.hive2.resources.uploads.query.DeleteQueryInput;
+import org.apache.ambari.view.hive2.resources.uploads.query.InsertFromQueryInput;
+import org.apache.ambari.view.hive2.resources.uploads.query.LoadQueryInput;
+import org.apache.ambari.view.hive2.resources.uploads.query.QueryGenerator;
+import org.apache.ambari.view.hive2.resources.uploads.query.TableInfo;
import org.apache.ambari.view.hive2.utils.ServiceFormattedException;
import org.apache.ambari.view.hive2.utils.SharedObjectsFactory;
import org.apache.ambari.view.utils.ambari.AmbariApi;
@@ -37,13 +40,24 @@ import org.apache.commons.io.input.ReaderInputStream;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.json.simple.JSONObject;
-
-import javax.ws.rs.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.ws.rs.Consumes;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
-import java.io.*;
-import java.lang.reflect.InvocationTargetException;
-import java.util.*;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
/**
* UI driven end points for creation of new hive table and inserting data into it.
@@ -61,13 +75,16 @@ import java.util.*;
*/
public class UploadService extends BaseService {
+ private final static Logger LOG =
+ LoggerFactory.getLogger(UploadService.class);
+
private AmbariApi ambariApi;
protected JobResourceManager resourceManager;
final private static String HIVE_METASTORE_LOCATION_KEY = "hive.metastore.warehouse.dir";
final private static String HIVE_SITE = "hive-site";
final private static String HIVE_METASTORE_LOCATION_KEY_VIEW_PROPERTY = HIVE_METASTORE_LOCATION_KEY;
- private static final String HIVE_DEFAULT_METASTORE_LOCATION = "/apps/hive/warehouse" ;
+ private static final String HIVE_DEFAULT_METASTORE_LOCATION = "/apps/hive/warehouse";
final private static String HIVE_DEFAULT_DB = "default";
public void validateForUploadFile(UploadFromHdfsInput input){
@@ -107,17 +124,19 @@ public class UploadService extends BaseService {
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response uploadForPreviewFromHDFS(UploadFromHdfsInput input) {
-
InputStream uploadedInputStream = null;
try {
uploadedInputStream = getHDFSFileStream(input.getHdfsPath());
this.validateForPreview(input);
PreviewData pd = generatePreview(input.getIsFirstRowHeader(), input.getInputFileType(), uploadedInputStream);
String tableName = getBasenameFromPath(input.getHdfsPath());
- return createPreviewResponse(pd, input.getIsFirstRowHeader(),tableName);
+ return createPreviewResponse(pd, input.getIsFirstRowHeader(), tableName);
+ } catch (WebApplicationException e) {
+ LOG.error(getErrorMessage(e), e);
+ throw e;
} catch (Exception e) {
- LOG.error("Exception occurred while generating preview for hdfs file : " + input.getHdfsPath(), e);
- throw new ServiceFormattedException(e.getMessage(), e);
+ LOG.error(e.getMessage(), e);
+ throw new ServiceFormattedException(e);
} finally {
if (null != uploadedInputStream) {
try {
@@ -146,10 +165,13 @@ public class UploadService extends BaseService {
isFirstRowHeader = false;
PreviewData pd = generatePreview(isFirstRowHeader, inputFileType, uploadedInputStream);
- return createPreviewResponse(pd, isFirstRowHeader,getBasename(fileDetail.getFileName()));
+ return createPreviewResponse(pd, isFirstRowHeader, getBasename(fileDetail.getFileName()));
+ } catch (WebApplicationException e) {
+ LOG.error(getErrorMessage(e), e);
+ throw e;
} catch (Exception e) {
- LOG.error("Exception occurred while generating preview for local file", e);
- throw new ServiceFormattedException(e.getMessage(), e);
+ LOG.error(e.getMessage(), e);
+ throw new ServiceFormattedException(e);
}
}
@@ -158,7 +180,7 @@ public class UploadService extends BaseService {
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
- public Response createTable(TableInput tableInput) {
+ public Job createTable(TableInput tableInput) {
try {
tableInput.validate();
List<ColumnDescriptionImpl> header = tableInput.getHeader();
@@ -173,17 +195,15 @@ public class UploadService extends BaseService {
LOG.info("tableCreationQuery : {}", tableCreationQuery);
- Job actualTableJob = createJob(tableCreationQuery, databaseName);
- String actualTableJobId = actualTableJob.getId();
-
- JSONObject jobObject = new JSONObject();
- jobObject.put("jobId", actualTableJobId);
-
- LOG.info("table creation jobId {}", actualTableJobId);
- return Response.ok(jobObject).status(201).build();
+ Job job = createJob(tableCreationQuery, databaseName);
+ LOG.info("job created for table creation {}", job);
+ return job;
+ } catch (WebApplicationException e) {
+ LOG.error(getErrorMessage(e), e);
+ throw e;
} catch (Throwable e) {
- LOG.error("Exception occurred while creating table with input : " + tableInput, e);
- throw new ServiceFormattedException(e.getMessage(), e);
+ LOG.error(e.getMessage(), e);
+ throw new ServiceFormattedException(e);
}
}
@@ -191,40 +211,41 @@ public class UploadService extends BaseService {
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
- public Response uploadFileFromHdfs(UploadFromHdfsInput input ) {
- this.validateForUploadFile(input);
-
- if (ParseOptions.InputFileType.CSV.toString().equals(input.getInputFileType()) && Boolean.FALSE.equals(input.getIsFirstRowHeader())) {
- // upload using the LOAD query
- LoadQueryInput loadQueryInput = new LoadQueryInput(input.getHdfsPath(), input.getDatabaseName(), input.getTableName());
- String loadQuery = new QueryGenerator().generateLoadQuery(loadQueryInput);
-
+ public Response uploadFileFromHdfs(UploadFromHdfsInput input) {
+ if (ParseOptions.InputFileType.CSV.toString().equals(input.getInputFileType()) && input.getIsFirstRowHeader().equals(Boolean.FALSE)) {
try {
- Job job = createJob(loadQuery, input.getDatabaseName());
+ // upload using the LOAD query
+ LoadQueryInput loadQueryInput = new LoadQueryInput(input.getHdfsPath(), input.getDatabaseName(), input.getTableName());
+ String loadQuery = new QueryGenerator().generateLoadQuery(loadQueryInput);
+ Job job = createJob(loadQuery, input.getDatabaseName());
JSONObject jo = new JSONObject();
jo.put("jobId", job.getId());
-
return Response.ok(jo).build();
+ } catch (WebApplicationException e) {
+ LOG.error(getErrorMessage(e), e);
+ throw e;
} catch (Throwable e) {
- LOG.error("Exception occurred while creating job for Load From HDFS query : " + loadQuery, e);
- throw new ServiceFormattedException(e.getMessage(), e);
+ LOG.error(e.getMessage(), e);
+ throw new ServiceFormattedException(e);
}
-
} else {
// create stream and upload
InputStream hdfsStream = null;
try {
hdfsStream = getHDFSFileStream(input.getHdfsPath());
- String path = uploadFileFromStream(hdfsStream, input.getIsFirstRowHeader(),input.getInputFileType(),input.getTableName(), input.getDatabaseName());
+ String path = uploadFileFromStream(hdfsStream, input.getIsFirstRowHeader(), input.getInputFileType(), input.getTableName(), input.getDatabaseName());
JSONObject jo = new JSONObject();
jo.put("uploadedPath", path);
return Response.ok(jo).build();
+ } catch (WebApplicationException e) {
+ LOG.error(getErrorMessage(e), e);
+ throw e;
} catch (Exception e) {
- LOG.error("Exception occurred while uploading the file from HDFS with path : " + input.getHdfsPath(), e);
- throw new ServiceFormattedException(e.getMessage(), e);
+ LOG.error(e.getMessage(), e);
+ throw new ServiceFormattedException(e);
} finally {
if (null != hdfsStream)
try {
@@ -249,14 +270,17 @@ public class UploadService extends BaseService {
@FormDataParam("databaseName") String databaseName
) {
try {
-
- String path = uploadFileFromStream(uploadedInputStream,isFirstRowHeader,inputFileType,tableName,databaseName);
+ String path = uploadFileFromStream(uploadedInputStream, isFirstRowHeader, inputFileType, tableName, databaseName);
JSONObject jo = new JSONObject();
jo.put("uploadedPath", path);
return Response.ok(jo).build();
+ } catch (WebApplicationException e) {
+ LOG.error(getErrorMessage(e), e);
+ throw e;
} catch (Exception e) {
- throw new ServiceFormattedException(e.getMessage(), e);
+ LOG.error(e.getMessage(), e);
+ throw new ServiceFormattedException(e);
}
}
@@ -264,19 +288,20 @@ public class UploadService extends BaseService {
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
- public Response insertFromTempTable(InsertFromQueryInput input) {
+ public Job insertFromTempTable(InsertFromQueryInput input) {
try {
String insertQuery = generateInsertFromQuery(input);
LOG.info("insertQuery : {}", insertQuery);
Job job = createJob(insertQuery, "default");
-
- JSONObject jo = new JSONObject();
- jo.put("jobId", job.getId());
-
- return Response.ok(jo).build();
+ LOG.info("Job created for insert from temp table : {}", job);
+ return job;
+ } catch (WebApplicationException e) {
+ LOG.error(getErrorMessage(e), e);
+ throw e;
} catch (Throwable e) {
- throw new ServiceFormattedException(e.getMessage(), e);
+ LOG.error(e.getMessage(), e);
+ throw new ServiceFormattedException(e);
}
}
@@ -284,19 +309,20 @@ public class UploadService extends BaseService {
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
- public Response deleteTable(DeleteQueryInput input) {
+ public Job deleteTable(DeleteQueryInput input) {
try {
String deleteQuery = generateDeleteQuery(input);
LOG.info("deleteQuery : {}", deleteQuery);
Job job = createJob(deleteQuery, "default");
-
- JSONObject jo = new JSONObject();
- jo.put("jobId", job.getId());
-
- return Response.ok(jo).build();
+ LOG.info("Job created for delete temp table : {} ", job);
+ return job;
+ } catch (WebApplicationException e) {
+ LOG.error(getErrorMessage(e), e);
+ throw e;
} catch (Throwable e) {
- throw new ServiceFormattedException(e.getMessage(), e);
+ LOG.error(e.getMessage(), e);
+ throw new ServiceFormattedException(e);
}
}
@@ -319,8 +345,12 @@ public class UploadService extends BaseService {
uploadFile(fullPath, new ReaderInputStream(reader));
return fullPath;
+ } catch (WebApplicationException e) {
+ LOG.error(getErrorMessage(e), e);
+ throw e;
} catch (Exception e) {
- throw new ServiceFormattedException(e.getMessage(), e);
+ LOG.error(e.getMessage(), e);
+ throw new ServiceFormattedException(e);
}
}
@@ -371,9 +401,9 @@ public class UploadService extends BaseService {
private String getHiveMetaStoreLocation() {
String dir = context.getProperties().get(HIVE_METASTORE_LOCATION_KEY_VIEW_PROPERTY);
- if(dir != null && !dir.trim().isEmpty()){
+ if (dir != null && !dir.trim().isEmpty()) {
return dir;
- }else{
+ } else {
LOG.debug("Neither found associated cluster nor found the view property {}. Returning default location : {}", HIVE_METASTORE_LOCATION_KEY_VIEW_PROPERTY, HIVE_DEFAULT_METASTORE_LOCATION);
return HIVE_DEFAULT_METASTORE_LOCATION;
}
@@ -390,6 +420,12 @@ public class UploadService extends BaseService {
out.close();
}
+ private static String getErrorMessage(WebApplicationException e) {
+ if (null != e.getResponse() && null != e.getResponse().getEntity())
+ return e.getResponse().getEntity().toString();
+ else return e.getMessage();
+ }
+
private PreviewData generatePreview(Boolean isFirstRowHeader, String inputFileType, InputStream uploadedInputStream) throws Exception {
ParseOptions parseOptions = new ParseOptions();
parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, inputFileType);
@@ -452,10 +488,10 @@ public class UploadService extends BaseService {
return getBasename(fileName);
}
- private String getBasename(String fileName){
+ private String getBasename(String fileName) {
int index = fileName.indexOf(".");
- if(index != -1){
- return fileName.substring(0,index);
+ if (index != -1) {
+ return fileName.substring(0, index);
}
return fileName;
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab362d06/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/DataParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/DataParser.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/DataParser.java
index 6edc97c..0aec39f 100644
--- a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/DataParser.java
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/DataParser.java
@@ -23,7 +23,6 @@ import org.apache.ambari.view.hive2.resources.uploads.parsers.csv.CSVParser;
import org.apache.ambari.view.hive2.resources.uploads.parsers.json.JSONParser;
import org.apache.ambari.view.hive2.resources.uploads.parsers.xml.XMLParser;
-import java.io.IOException;
import java.io.Reader;
import java.util.Iterator;
@@ -35,7 +34,7 @@ public class DataParser implements IParser {
private IParser parser;
- public DataParser(Reader reader, ParseOptions parseOptions) throws IOException {
+ public DataParser(Reader reader, ParseOptions parseOptions) throws Exception {
if (parseOptions.getOption(ParseOptions.OPTIONS_FILE_TYPE).equals(ParseOptions.InputFileType.CSV.toString())) {
parser = new CSVParser(reader, parseOptions);
} else if (parseOptions.getOption(ParseOptions.OPTIONS_FILE_TYPE).equals(ParseOptions.InputFileType.JSON.toString())) {
@@ -61,7 +60,7 @@ public class DataParser implements IParser {
}
@Override
- public void close() throws IOException {
+ public void close() throws Exception {
parser.close();
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab362d06/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/IParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/IParser.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/IParser.java
index 401ef48..dc0c552 100644
--- a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/IParser.java
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/IParser.java
@@ -20,14 +20,13 @@ package org.apache.ambari.view.hive2.resources.uploads.parsers;
import org.apache.ambari.view.hive2.client.Row;
-import java.io.IOException;
import java.io.Reader;
/**
* Interface defining methods for Parsers that can used for generating preview
* and uploading table into hive.
*/
-public interface IParser extends Iterable<Row> {
+public interface IParser extends Iterable<Row>, AutoCloseable{
/**
* @return returns the Reader that can be read to get the table data as CSV Text Data that can be uploaded directly
@@ -38,7 +37,4 @@ public interface IParser extends Iterable<Row> {
PreviewData parsePreview();
Row extractHeader();
-
- void close() throws IOException;
-
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab362d06/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/ParseUtils.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/ParseUtils.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/ParseUtils.java
index fefacce..d00dc24 100644
--- a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/ParseUtils.java
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/ParseUtils.java
@@ -18,15 +18,24 @@
package org.apache.ambari.view.hive2.resources.uploads.parsers;
-import org.apache.ambari.view.hive2.client.ColumnDescription;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.text.SimpleDateFormat;
import java.util.Date;
+import java.util.List;
+
+import static org.apache.ambari.view.hive2.client.ColumnDescription.DataTypes;
public class ParseUtils {
+ protected final static Logger LOG =
+ LoggerFactory.getLogger(ParseUtils.class);
+
final public static String[] DATE_FORMATS = {"mm/dd/yyyy", "dd/mm/yyyy", "mm-dd-yyyy" /*add more formatss*/};
+ final public static DataTypes [] dataTypeList = {DataTypes.BOOLEAN,DataTypes.INT,DataTypes.BIGINT,DataTypes.DOUBLE,DataTypes.CHAR,DataTypes.DATE,DataTypes.STRING};
+
public static boolean isInteger(Object object) {
if (object == null)
return false;
@@ -56,6 +65,12 @@ public class ParseUtils {
return false;
}
+ public static boolean isString(Object object) {
+ if (object == null)
+ return false;
+ else return true; // any non null can always be interpreted as a string
+ }
+
public static boolean isLong(Object object) {
if (object == null)
return false;
@@ -119,15 +134,56 @@ public class ParseUtils {
return false;
}
- public static ColumnDescription.DataTypes detectHiveDataType(Object object) {
+ public static DataTypes detectHiveDataType(Object object) {
// detect Integer
- if (isInteger(object)) return ColumnDescription.DataTypes.INT;
- if (isLong(object)) return ColumnDescription.DataTypes.BIGINT;
- if (isBoolean(object)) return ColumnDescription.DataTypes.BOOLEAN;
- if (isDouble(object)) return ColumnDescription.DataTypes.DOUBLE;
- if (isDate(object)) return ColumnDescription.DataTypes.DATE;
- if (isChar(object)) return ColumnDescription.DataTypes.CHAR;
-
- return ColumnDescription.DataTypes.STRING;
+ if (isBoolean(object)) return DataTypes.BOOLEAN;
+ if (isInteger(object)) return DataTypes.INT;
+ if (isLong(object)) return DataTypes.BIGINT;
+ if (isDouble(object)) return DataTypes.DOUBLE;
+ if (isChar(object)) return DataTypes.CHAR;
+ if (isDate(object)) return DataTypes.DATE;
+
+ return DataTypes.STRING;
+ }
+
+ public static boolean checkDatatype( Object object, DataTypes datatype){
+ switch(datatype){
+
+ case BOOLEAN :
+ return isBoolean(object);
+ case INT :
+ return isInteger(object);
+ case BIGINT :
+ return isLong(object);
+ case DOUBLE:
+ return isDouble(object);
+ case CHAR:
+ return isChar(object);
+ case DATE:
+ return isDate(object);
+ case STRING:
+ return isString(object);
+
+ default:
+ LOG.error("this datatype detection is not supported : {}", datatype);
+ return false;
+ }
+ }
+
+ public static DataTypes detectHiveColumnDataType(List<Object> colValues) {
+ boolean found = true;
+ for(DataTypes datatype : dataTypeList){
+ found = true;
+ for(Object object : colValues){
+ if(!checkDatatype(object,datatype)){
+ found = false;
+ break;
+ }
+ }
+
+ if(found) return datatype;
+ }
+
+ return DataTypes.STRING; //default
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab362d06/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/Parser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/Parser.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/Parser.java
index 79c5482..847a4b8 100644
--- a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/Parser.java
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/Parser.java
@@ -22,6 +22,8 @@ import org.apache.ambari.view.hive2.client.ColumnDescription;
import org.apache.ambari.view.hive2.client.Row;
import org.apache.ambari.view.hive2.resources.uploads.ColumnDescriptionImpl;
import org.apache.ambari.view.hive2.resources.uploads.TableDataReader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.Reader;
import java.util.ArrayList;
@@ -35,6 +37,9 @@ import java.util.NoSuchElementException;
*/
public abstract class Parser implements IParser {
+ protected final static Logger LOG =
+ LoggerFactory.getLogger(Parser.class);
+
protected Reader reader; // same as CSV reader in this case
protected ParseOptions parseOptions;
private int numberOfPreviewRows = 10;
@@ -45,22 +50,23 @@ public abstract class Parser implements IParser {
}
/**
- * returns which datatype was detected for the maximum number of times in the given column
+ * returns which datatype is valid for all the values
+ */
+
+ /**
*
- * @param typeCounts
- * @param colNum
- * @return
+ * @param rows : non empty list of rows
+ * @param colNum : to detect datatype for this column number.
+ * @return data type for that column
*/
- private int getLikelyDataType(int[][] typeCounts, int colNum) {
- int[] colArray = typeCounts[colNum];
- int maxIndex = 0;
- int i = 1;
- for (; i < colArray.length; i++) {
- if (colArray[i] > colArray[maxIndex])
- maxIndex = i;
+ private ColumnDescription.DataTypes getLikelyDataType(List<Row> rows, int colNum) {
+ // order of detection BOOLEAN,INT,BIGINT,DOUBLE,DATE,CHAR,STRING
+ List<Object> colValues = new ArrayList<>(rows.size());
+ for( Row row : rows ){
+ colValues.add(row.getRow()[colNum]);
}
- return maxIndex;
+ return ParseUtils.detectHiveColumnDataType(colValues);
}
@Override
@@ -70,12 +76,15 @@ public abstract class Parser implements IParser {
@Override
public PreviewData parsePreview() {
- List<Row> previewRows;
+ LOG.info("generating preview for : {}", this.parseOptions );
+
+ ArrayList<Row> previewRows;
List<ColumnDescription> header;
try {
numberOfPreviewRows = (Integer) parseOptions.getOption(ParseOptions.OPTIONS_NUMBER_OF_PREVIEW_ROWS);
} catch (Exception e) {
+ LOG.debug("Illegal number of preview columns supplied {}",parseOptions.getOption(ParseOptions.OPTIONS_NUMBER_OF_PREVIEW_ROWS) );
}
int numberOfRows = numberOfPreviewRows;
@@ -83,42 +92,35 @@ public abstract class Parser implements IParser {
Row headerRow = null;
Integer numOfCols = null;
- int[][] typeCounts = null;
- if (parseOptions.getOption(ParseOptions.OPTIONS_HEADER) != null && parseOptions.getOption(ParseOptions.OPTIONS_HEADER).equals(ParseOptions.HEADER.FIRST_RECORD.toString())) {
- if (!this.iterator().hasNext()) {
- throw new NoSuchElementException("Cannot parse Header");
- }
+ if (parseOptions.getOption(ParseOptions.OPTIONS_HEADER) != null &&
+ ( parseOptions.getOption(ParseOptions.OPTIONS_HEADER).equals(ParseOptions.HEADER.FIRST_RECORD.toString()) ||
+ parseOptions.getOption(ParseOptions.OPTIONS_HEADER).equals(ParseOptions.HEADER.EMBEDDED.toString())
+ )) {
headerRow = extractHeader();
numOfCols = headerRow.getRow().length;
- typeCounts = new int[numOfCols][ColumnDescription.DataTypes.values().length];
- previewRows.add(headerRow);
}
- // find data types.
-
Row r;
if (iterator().hasNext()) {
r = iterator().next();
if( null == numOfCols ) {
numOfCols = r.getRow().length;
- typeCounts = new int[numOfCols][ColumnDescription.DataTypes.values().length];
}
} else {
- throw new NoSuchElementException("No rows in the file.");
+ LOG.error("No rows found in the file. returning error.");
+ throw new NoSuchElementException("No rows in the file.");
}
while (true) {
// create Header definition from row
Object[] values = r.getRow();
-
Object[] newValues= new Object[numOfCols]; // adds null if less columns detected and removes extra columns if any
for (int colNum = 0; colNum < numOfCols; colNum++) {
if(colNum < values.length) {
// detect type
ColumnDescription.DataTypes type = ParseUtils.detectHiveDataType(values[colNum]);
- typeCounts[colNum][type.ordinal()]++;
newValues[colNum] = values[colNum];
}else{
newValues[colNum] = null;
@@ -134,14 +136,25 @@ public abstract class Parser implements IParser {
r = iterator().next();
}
- if (previewRows.size() <= 0)
+ if (previewRows.size() <= 0) {
+ LOG.error("No rows found in the file. returning error.");
throw new NoSuchElementException("Does not contain any rows.");
+ }
+
+ // find data types.
+ header = generateHeader(headerRow,previewRows,numOfCols);
+
+ return new PreviewData(header,previewRows);
+ }
+
+ private List<ColumnDescription> generateHeader(Row headerRow,List<Row> previewRows, int numOfCols) {
+ List<ColumnDescription> header = new ArrayList<>();
- header = new ArrayList<>(numOfCols);
for (int colNum = 0; colNum < numOfCols; colNum++) {
- int dataTypeId = getLikelyDataType(typeCounts, colNum);
- ColumnDescription.DataTypes type = ColumnDescription.DataTypes.values()[dataTypeId];
- String colName = "Column" + colNum;
+ ColumnDescription.DataTypes type = getLikelyDataType(previewRows,colNum);
+ LOG.info("datatype detected for column {} : {}", colNum, type);
+
+ String colName = "Column" + (colNum + 1);
if (null != headerRow)
colName = (String) headerRow.getRow()[colNum];
@@ -149,6 +162,7 @@ public abstract class Parser implements IParser {
header.add(cd);
}
- return new PreviewData(header,previewRows);
+ LOG.debug("return headers : {} ", header);
+ return header;
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab362d06/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/csv/CSVParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/csv/CSVParser.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/csv/CSVParser.java
index 74cc060..daff342 100644
--- a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/csv/CSVParser.java
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/csv/CSVParser.java
@@ -22,8 +22,9 @@ import org.apache.ambari.view.hive2.resources.uploads.parsers.ParseOptions;
import org.apache.ambari.view.hive2.resources.uploads.parsers.Parser;
import org.apache.commons.csv.CSVFormat;
-import java.io.*;
-import java.util.*;
+import java.io.IOException;
+import java.io.Reader;
+import java.util.Iterator;
/**
* Parses the given Reader which contains CSV stream and extracts headers and rows, and detect datatypes of columns
@@ -45,7 +46,7 @@ public class CSVParser extends Parser {
}
@Override
- public void close() throws IOException {
+ public void close() throws Exception {
this.parser.close();
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab362d06/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/json/JSONParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/json/JSONParser.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/json/JSONParser.java
index 772b6fd..66e9fcc 100644
--- a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/json/JSONParser.java
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/json/JSONParser.java
@@ -74,7 +74,7 @@ public class JSONParser extends Parser {
}
@Override
- public void close() throws IOException {
+ public void close() throws Exception {
this.jsonReader.close();
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab362d06/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/xml/XMLParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/xml/XMLParser.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/xml/XMLParser.java
index 51671e5..95e996c 100644
--- a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/xml/XMLParser.java
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/xml/XMLParser.java
@@ -85,7 +85,7 @@ public class XMLParser extends Parser {
}
@Override
- public void close() throws IOException {
+ public void close() throws Exception {
try {
this.xmlReader.close();
} catch (XMLStreamException e) {
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab362d06/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/file-upload.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/file-upload.js b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/file-upload.js
index 1bd8eee..7bb6e0b 100644
--- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/file-upload.js
+++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/file-upload.js
@@ -25,7 +25,6 @@ export default EmberUploader.Uploader.extend({
// Override
_ajax: function(settings) {
settings = Ember.merge(settings, this.getProperties('headers'));
- console.log("_ajax : settings: " + JSON.stringify(settings));
return this._super(settings);
}
});
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab362d06/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/upload-table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/upload-table.js b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/upload-table.js
index ef4df43..76fce50 100644
--- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/upload-table.js
+++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/upload-table.js
@@ -30,9 +30,7 @@ export default application.extend({
uploadFiles: function (path, files, extras) {
var uploadUrl = this.buildUploadURL(path);
- console.log("uplaoder : uploadURL : ", uploadUrl);
- console.log("uploader : extras : ", extras);
- console.log("uploader : files : ", files);
+ console.log("uploader : uploadURL : ", uploadUrl, " extras : ", extras , "files : ", files);
var hdrs = Ember.$.extend(true, {},this.get('headers'));
delete hdrs['Content-Type'];
@@ -72,10 +70,8 @@ export default application.extend({
headers: self.get('headers'),
dataType : 'json'
}).done(function(data) {
- console.log( "inside done : data : ", data );
resolve(data);
}).fail(function(error) {
- console.log( "inside fail error : ", error );
reject(error);
});
});
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab362d06/contrib/views/hive-next/src/main/resources/ui/hive-web/app/components/file-upload.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/components/file-upload.js b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/components/file-upload.js
index 1cd05ae..5dc7746 100644
--- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/components/file-upload.js
+++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/components/file-upload.js
@@ -19,7 +19,16 @@
import EmberUploader from 'ember-uploader';
export default EmberUploader.FileField.extend({
+ onChangeUploadFiles : function(){
+ if(!this.get("uploadFiles")){
+ // files were cleared by the controller so clear here as well.
+ this.set("files");
+ this.set("value");
+ }
+ }.observes("uploadFiles"),
filesDidChange: function(files) {
- this.sendAction('filesUploaded',files); // sends this action to controller.
+ if( files ) {
+ this.sendAction('filesUploaded', files); // sends this action to controller.
+ }
}
});
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab362d06/contrib/views/hive-next/src/main/resources/ui/hive-web/app/components/validated-text-field.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/components/validated-text-field.js b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/components/validated-text-field.js
new file mode 100644
index 0000000..50cea36
--- /dev/null
+++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/components/validated-text-field.js
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+import Ember from 'ember';
+
+/** Example :
+ * {{#validated-text-field
+ * inputValue=bindedTextValue invalidClass='form-control red-border' validClass='form-control' regex="^[a-z]+$"
+ * allowEmpty=false tooltip="Enter valid word" errorMessage="Please enter valid word" placeholder="Enter Word"}}
+ * {{/validated-text-field}}
+ */
+export default Ember.Component.extend({
+ allowEmpty: true,
+ valid: true,
+ setValid: function () {
+ this.set("valid", true);
+ this.set("inputClass", this.get("validClass"));
+ this.set("message", this.get("tooltip"));
+ },
+ setInvalid: function () {
+ this.set("valid", false);
+ this.set("inputClass", this.get("invalidClass"));
+ this.set("message", this.get("errorMessage"));
+ },
+ onChangeInputValue: function () {
+ var regStr = this.get("regex");
+ var regExp = new RegExp(regStr, "g");
+ if (this.get("inputValue")) {
+ var arr = this.get("inputValue").match(regExp);
+ if (arr != null && arr.length == 1) {
+ this.setValid();
+ }
+ else {
+ this.setInvalid();
+ }
+ } else {
+ if (this.get("allowEmpty")) {
+ this.setValid();
+ } else {
+ this.setInvalid();
+ }
+ }
+ }.observes("inputValue").on('init')
+});
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab362d06/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/index.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/index.js b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/index.js
index 4f3261c..5b7544d 100644
--- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/index.js
+++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/index.js
@@ -141,8 +141,16 @@ export default Ember.Controller.extend({
defer = Ember.RSVP.defer(),
originalModel = this.get('model');
+ var title = "";
+ if(shouldGetVisualExplain){
+ title += "Visual Explain "
+ }else if(shouldExplain){
+ title += "Explain "
+ }
+
+ title += originalModel.get('title');
job = this.store.createRecord(constants.namingConventions.job, {
- title: originalModel.get('title'),
+ title: title,
sessionTag: originalModel.get('sessionTag'),
dataBase: this.get('selectedDatabase.name'),
referrer: referrer
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab362d06/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/upload-table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/upload-table.js b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/upload-table.js
index cb267a0..84637e5 100644
--- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/upload-table.js
+++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/upload-table.js
@@ -22,16 +22,20 @@ import constants from 'hive/utils/constants';
export default Ember.Controller.extend({
- isLocalUpload : Ember.computed.equal("uploadSource","local"),
- uploadSource : "local",
- hdfsPath : "",
+ COLUMN_NAME_REGEX: "^[a-zA-Z]{1}[a-zA-Z0-9_]*$",
+ TABLE_NAME_REGEX: "^[a-zA-Z]{1}[a-zA-Z0-9_]*$",
+ isLocalUpload: Ember.computed.equal("uploadSource", "local"),
+ uploadSource: "local",
+ COLUMN_NAME_PREFIX : "column",
+ hdfsPath: "",
jobService: Ember.inject.service(constants.namingConventions.job),
notifyService: Ember.inject.service(constants.namingConventions.notify),
- needs: ['databases'],
+ databaseService : Ember.inject.service(constants.namingConventions.database),
+ databases : Ember.computed.alias("databaseService.databases"),
showErrors: false,
uploader: Uploader.create(),
baseUrl: "/resources/upload",
- isFirstRowHeader: true, // is first row header
+ isFirstRowHeader: false, // is first row header
header: null, // header received from server
files: null, // files that need to be uploaded only file[0] is relevant
firstRow: [], // the actual first row of the table.
@@ -41,9 +45,26 @@ export default Ember.Controller.extend({
filePath: null,
tableName: null,
uploadProgressInfos : [],
+ DEFAULT_DB_NAME : 'default',
+ showPreview : false,
onChangeUploadSource : function(){
this.clearFields();
}.observes("uploadSource"),
+ setDefaultDB : function(){
+ var self = this;
+ var defaultDatabase = this.get('databases').find(
+ function(item,index){
+ if(item.id == self.DEFAULT_DB_NAME )
+ return true;
+ }
+ );
+
+ console.log("setting the initial database to : " + defaultDatabase);
+ self.set("selectedDatabase",defaultDatabase);
+ },
+ init: function() {
+ this._super.apply(this, arguments); this.setDefaultDB();
+ },
uploadProgressInfo : Ember.computed("uploadProgressInfos.[]",function(){
var info = "";
for( var i = 0 ; i < this.get('uploadProgressInfos').length ; i++)
@@ -64,8 +85,9 @@ export default Ember.Controller.extend({
"RCFILE" ,
"ORC" ,
"PARQUET" ,
- "AVRO" ,
- "INPUTFORMAT"
+ "AVRO"
+ //,
+ //"INPUTFORMAT" -- not supported as of now.
],
selectedFileType: "ORC",
dataTypes: [
@@ -84,21 +106,21 @@ export default Ember.Controller.extend({
"VARCHAR", // -- (Note: Available in Hive 0.12.0 and later)
"CHAR" // -- (Note: Available in Hive 0.13.0 and later)
],
+ _setHeaderElements : function(header,valueArray){
+ header.forEach(function (item, index) {
+ Ember.set(item, 'name', valueArray.objectAt(index));
+ }, this);
+ },
isFirstRowHeaderDidChange: function () {
- console.log("inside onFirstRowHeader : isFirstRowHeader : " + this.get('isFirstRowHeader'));
if (this.get('isFirstRowHeader') != null && typeof this.get('isFirstRowHeader') !== 'undefined') {
if (this.get('isFirstRowHeader') == false) {
if (this.get('rows')) {
this.get('rows').unshiftObject({row: this.get('firstRow')});
+ this._setHeaderElements(this.get('header'),this.get('defaultColumnNames'));
}
} else if( this.get('header') ) { // headers are available
// take first row of
- this.get('header').forEach(function (item, index) {
- console.log("item : ", item);
- console.log("this.get('firstRow').objectAt(index) : ", this.get('firstRow').objectAt(index));
- Ember.set(item, 'name', this.get('firstRow')[index]);
- }, this);
-
+ this._setHeaderElements(this.get('header'),this.get('firstRow'));
this.get('rows').removeAt(0);
}
@@ -108,16 +130,13 @@ export default Ember.Controller.extend({
popUploadProgressInfos : function(){
var msg = this.get('uploadProgressInfos').popObject();
- // console.log("popedup message : " + msg);
},
pushUploadProgressInfos : function(info){
this.get('uploadProgressInfos').pushObject(info);
- // console.log("pushed message : " + info);
},
clearUploadProgressModal : function(){
- // console.log("inside clearUploadProgressModal this.get('uploadProgressInfos') : " + this.get('uploadProgressInfos'));
var len = this.get('uploadProgressInfos').length;
for( var i = 0 ; i < len ; i++){
this.popUploadProgressInfos();
@@ -125,7 +144,6 @@ export default Ember.Controller.extend({
},
hideUploadModal : function(){
- console.log("hiding the modal ....");
this.clearUploadProgressModal();
Ember.$("#uploadProgressModal").modal("hide");
},
@@ -135,11 +153,12 @@ export default Ember.Controller.extend({
},
clearFields: function () {
+ this.set("showPreview",false);
this.set("hdfsPath");
this.set("header");
this.set("rows");
this.set("error");
- this.set('isFirstRowHeader',true);
+ this.set('isFirstRowHeader',false);
this.set('files');
this.set("firstRow");
this.set("selectedDatabase",null);
@@ -147,17 +166,15 @@ export default Ember.Controller.extend({
this.set("filePath");
this.set('tableName');
this.clearUploadProgressModal();
+ this.setDefaultDB();
this.printValues();
},
printValues: function () {
- console.log("printing all values : ");
- console.log("header : ", this.get('header'));
- console.log("rows : ", this.get('rows'));
- console.log("error : ", this.get('error'));
- console.log("isFirstRowHeader : ", this.get('isFirstRowHeader'));
- console.log("files : ", this.get('files'));
- console.log("firstRow : ", this.get('firstRow'));
+ console.log("header : ", this.get('header'),
+ ". rows : ",this.get('rows'),". error : ", this.get('error'),
+ " isFirstRowHeader : ", this.get('isFirstRowHeader'),
+ "firstRow : ", this.get('firstRow'));
},
generateTempTableName : function(){
@@ -173,21 +190,20 @@ export default Ember.Controller.extend({
waitForJobStatus: function (jobId, resolve, reject) {
console.log("finding status of job: ", jobId);
var self = this;
- var fetchJobPromise = this.get('jobService').fetchJobStatus(jobId);
+ var fetchJobPromise = this.get('jobService').fetchJob(jobId);
fetchJobPromise.then(function (data) {
console.log("waitForJobStatus : data : ", data);
- var status = data.jobStatus;
- if (status == "SUCCEEDED") {
+ var job = data.job;
+ var status = job.status.toUpperCase();
+ if (status == constants.statuses.succeeded ) {
console.log("resolving waitForJobStatus with : " , status);
- resolve(status);
- } else if (status == "CANCELED" || status == "CLOSED" || status == "ERROR") {
+ resolve(job);
+ } else if (status == constants.statuses.canceled || status == constants.statuses.closed || status == constants.statuses.error) {
console.log("rejecting waitForJobStatus with : " + status);
- reject(new Error(status));
+ reject(new Error(job.statusMessage));
} else {
- console.log("retrying waitForJobStatus : ");
- Ember.run.later(self, function() {
- this.waitForJobStatus(jobId, resolve, reject);
- }, 1000);
+ console.log("retrying waitForJobStatus : ", jobId);
+ Ember.run.later(self, function() { this.waitForJobStatus(jobId, resolve, reject); }, 1000);
}
}, function (error) {
console.log("rejecting waitForJobStatus with : " + error);
@@ -226,14 +242,18 @@ export default Ember.Controller.extend({
waitForGeneratingPreview: function () {
console.log("waitForGeneratingPreview");
this.showUploadModal();
- this.pushUploadProgressInfos("<li> Generating Preview .... </li>")
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.generatingPreview'))
},
previewTable: function (data) {
console.log('inside previewTable');
+ var self = this;
+ var defaultColumnNames = data.header.map(function(item,index){
+ return self.COLUMN_NAME_PREFIX + index;
+ });
+ this.set("defaultColumnNames",defaultColumnNames);
this.set("header", data.header);
this.set("firstRow", data.rows[0].row);
- console.log("firstRow : ", this.get('firstRow'));
this.set('isFirstRowHeader', data.isFirstRowHeader);
this.set('tableName',data.tableName);
if(data.isFirstRowHeader == true){
@@ -244,23 +264,25 @@ export default Ember.Controller.extend({
onGeneratePreviewSuccess: function (data) {
console.log("onGeneratePreviewSuccess");
+ this.set("showPreview",true);
this.hideUploadModal();
this.previewTable(data);
},
onGeneratePreviewFailure: function (error) {
console.log("onGeneratePreviewFailure");
+ this.set("showPreview",false);
this.hideUploadModal();
this.setError(error);
},
- createTable: function () {
- console.log("table headers : ", this.get('header'));
+ createActualTable : function(){
+ console.log("createActualTable");
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.startingToCreateActualTable'));
var headers = this.get('header');
-
var selectedDatabase = this.get('selectedDatabase');
- if (null == selectedDatabase || typeof selectedDatabase === 'undefined') {
- throw new Error(Ember.I18n.t('hive.errors.emptyDatabase'));
+ if (!selectedDatabase) {
+ throw new Error(this.translate('hive.errors.emptyDatabase', {database : this.translate("hive.words.database")}));
}
this.set('databaseName', this.get('selectedDatabase').get('name'));
@@ -269,17 +291,8 @@ export default Ember.Controller.extend({
var isFirstRowHeader = this.get('isFirstRowHeader');
var filetype = this.get("selectedFileType");
- if (null == databaseName || typeof databaseName === 'undefined' || databaseName == '') {
- throw new Error(Ember.I18n.t('hive.errors.emptyDatabase'));
- }
- if (null == tableName || typeof tableName === 'undefined' || tableName == '') {
- throw new Error(Ember.I18n.t('hive.errors.emptyTableName'));
- }
- if (null == isFirstRowHeader || typeof isFirstRowHeader === 'undefined') {
- throw new Error(Ember.I18n.t('hive.errors.emptyIsFirstRow'));
- }
-
- this.validateColumns();
+ this.validateInput(headers,tableName,databaseName,isFirstRowHeader);
+ this.showUploadModal();
return this.get('uploader').createTable({
"isFirstRowHeader": isFirstRowHeader,
@@ -290,16 +303,10 @@ export default Ember.Controller.extend({
});
},
- createActualTable : function(){
- console.log("createActualTable");
- this.pushUploadProgressInfos("<li> Starting to create Actual table.... </li>");
- return this.createTable();
- },
-
waitForCreateActualTable: function (jobId) {
console.log("waitForCreateActualTable");
this.popUploadProgressInfos();
- this.pushUploadProgressInfos("<li> Waiting for creation of Actual table.... </li>");
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.waitingToCreateActualTable'));
var self = this;
var p = new Ember.RSVP.Promise(function (resolve, reject) {
self.waitForJobStatus(jobId, resolve, reject);
@@ -311,19 +318,19 @@ export default Ember.Controller.extend({
onCreateActualTableSuccess : function(){
console.log("onCreateTableSuccess");
this.popUploadProgressInfos();
- this.pushUploadProgressInfos("<li> Successfully created Actual table. </li>");
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.successfullyCreatedActualTable'));
},
onCreateActualTableFailure : function(error){
console.log("onCreateActualTableFailure");
this.popUploadProgressInfos();
- this.pushUploadProgressInfos("<li> Failed to create Actual table. </li>");
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.failedToCreateActualTable'));
this.setError(error);
},
createTempTable : function(){
console.log("createTempTable");
- this.pushUploadProgressInfos("<li> Starting to create Temporary table.... </li>");
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.startingToCreateTemporaryTable'));
var tempTableName = this.generateTempTableName();
this.set('tempTableName',tempTableName);
return this.get('uploader').createTable({
@@ -338,7 +345,7 @@ export default Ember.Controller.extend({
waitForCreateTempTable: function (jobId) {
console.log("waitForCreateTempTable");
this.popUploadProgressInfos();
- this.pushUploadProgressInfos("<li> Waiting for creation of Temporary table.... </li>");
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.waitingToCreateTemporaryTable'));
var self = this;
var p = new Ember.RSVP.Promise(function (resolve, reject) {
self.waitForJobStatus(jobId, resolve, reject);
@@ -350,11 +357,11 @@ export default Ember.Controller.extend({
onCreateTempTableSuccess : function(){
console.log("onCreateTempTableSuccess");
this.popUploadProgressInfos();
- this.pushUploadProgressInfos("<li> Successfully created Temporary table. </li>");
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.successfullyCreatedTemporaryTable'));
},
deleteTable : function(databaseName, tableName){
- console.log("deleting table " + databaseName + "." + tableName);
+ console.log("deleting table ", databaseName , "." , tableName);
return this.get('uploader').deleteTable({
"database": databaseName,
@@ -365,7 +372,7 @@ export default Ember.Controller.extend({
deleteTableOnError : function(databaseName,tableName, tableLabel){
//delete table and wait for delete job
var self = this;
- this.pushUploadProgressInfos("<li> Deleting " + tableLabel + " table... </li>");
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.deletingTable',{table:tableLabel}));
return this.deleteTable(databaseName,tableName).then(function(data){
return new Ember.RSVP.Promise(function(resolve,reject){
@@ -373,26 +380,31 @@ export default Ember.Controller.extend({
});
}).then(function(){
self.popUploadProgressInfos();
- self.pushUploadProgressInfos("<li> Successfully deleted " + tableLabel + " table. </li>");
+ self.pushUploadProgressInfos(self.formatMessage('hive.messages.succesfullyDeletedTable',{table:tableLabel}));
return Ember.RSVP.Promise.resolve();
},function(err){
self.popUploadProgressInfos();
- self.pushUploadProgressInfos("<li> Failed to delete " + tableLabel + " table. </li>");
+ self.pushUploadProgressInfos(self.formatMessage('hive.messages.failedToDeleteTable',{table:tableLabel}));
self.setError(err);
return Ember.RSVP.Promise.reject();
});
},
rollBackActualTableCreation : function(){
- return this.deleteTableOnError(this.get("databaseName"),this.get("tableName"),"Actual");
+ return this.deleteTableOnError(this.get("databaseName"),this.get("tableName"),this.translate('hive.words.actual'));
},
-
+ translate : function(str,vars){
+ return Ember.I18n.t(str,vars);
+ },
+ formatMessage : function(messageId, vars){
+ return "<li>" + this.translate(messageId,vars) + "</li>";
+ },
onCreateTempTableFailure : function(error){
console.log("onCreateTempTableFailure");
this.setError(error);
this.popUploadProgressInfos();
- this.pushUploadProgressInfos("<li> Failed to create temporary table. </li>");
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.failedToCreateTemporaryTable'));
return this.rollBackActualTableCreation().then(function(data){
return Ember.RSVP.Promise.reject(error); // always reject for the flow to stop
},function(err){
@@ -402,7 +414,7 @@ export default Ember.Controller.extend({
uploadFile : function(){
console.log("uploadFile");
- this.pushUploadProgressInfos("<li> Starting to upload the file .... </li>");
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.startingToUploadFile'));
if( this.get("isLocalUpload")){
return this.uploadTable();
}else{
@@ -413,7 +425,7 @@ export default Ember.Controller.extend({
waitForUploadingFile: function (data) {
console.log("waitForUploadingFile");
this.popUploadProgressInfos();
- this.pushUploadProgressInfos("<li> Waiting for uploading file .... </li>");
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.waitingToUploadFile'));
if( data.jobId ){
var self = this;
var p = new Ember.RSVP.Promise(function (resolve, reject) {
@@ -428,12 +440,12 @@ export default Ember.Controller.extend({
onUploadingFileSuccess: function () {
console.log("onUploadingFileSuccess");
this.popUploadProgressInfos();
- this.pushUploadProgressInfos("<li> Successfully uploaded file. </li>");
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.successfullyUploadedFile') );
},
rollBackTempTableCreation : function(){
var self = this;
- return this.deleteTableOnError(this.get("databaseName"),this.get("tempTableName"),"Temporary").then(function(data){
+ return this.deleteTableOnError(this.get("databaseName"),this.get("tempTableName"),this.translate('hive.words.temporary')).then(function(data){
return self.rollBackActualTableCreation();
},function(err){
return self.rollBackActualTableCreation();
@@ -444,7 +456,7 @@ export default Ember.Controller.extend({
console.log("onUploadingFileFailure");
this.setError(error);
this.popUploadProgressInfos();
- this.pushUploadProgressInfos("<li> Failed to upload file. </li>");
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.failedToUploadFile'));
return this.rollBackTempTableCreation().then(function(data){
return Ember.RSVP.Promise.reject(error); // always reject for the flow to stop
},function(err){
@@ -458,7 +470,7 @@ export default Ember.Controller.extend({
insertIntoTable : function(){
console.log("insertIntoTable");
- this.pushUploadProgressInfos("<li> Starting to Insert rows from temporary table to actual table .... </li>");
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.startingToInsertRows'));
return this.get('uploader').insertIntoTable({
"fromDatabase": this.get("databaseName"),
@@ -471,7 +483,7 @@ export default Ember.Controller.extend({
waitForInsertIntoTable: function (jobId) {
console.log("waitForInsertIntoTable");
this.popUploadProgressInfos();
- this.pushUploadProgressInfos("<li> Waiting for Insertion of rows from temporary table to actual table .... </li>");
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.waitingToInsertRows'));
var self = this;
var p = new Ember.RSVP.Promise(function (resolve, reject) {
self.waitForJobStatus(jobId, resolve, reject);
@@ -483,14 +495,14 @@ export default Ember.Controller.extend({
onInsertIntoTableSuccess : function(){
console.log("onInsertIntoTableSuccess");
this.popUploadProgressInfos();
- this.pushUploadProgressInfos("<li> Successfully inserted rows from temporary table to actual table. </li>");
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.successfullyInsertedRows'));
},
onInsertIntoTableFailure : function(error){
console.log("onInsertIntoTableFailure");
this.setError(error);
this.popUploadProgressInfos();
- this.pushUploadProgressInfos("<li> Failed to insert rows from temporary table to actual table. </li>");
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.failedToInsertRows'));
return this.rollBackUploadFile().then(function(data){
return Ember.RSVP.Promise.reject(error); // always reject for the flow to stop
},function(err){
@@ -500,7 +512,7 @@ export default Ember.Controller.extend({
deleteTempTable : function(){
console.log("deleteTempTable");
- this.pushUploadProgressInfos("<li> Starting to delete temporary table .... </li>");
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.startingToDeleteTemporaryTable'));
return this.deleteTable(
this.get("databaseName"),
@@ -511,7 +523,7 @@ export default Ember.Controller.extend({
waitForDeleteTempTable: function (jobId) {
console.log("waitForDeleteTempTable");
this.popUploadProgressInfos();
- this.pushUploadProgressInfos("<li> Waiting for deletion of temporary table .... </li>");
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.waitingToDeleteTemporaryTable'));
var self = this;
var p = new Ember.RSVP.Promise(function (resolve, reject) {
self.waitForJobStatus(jobId, resolve, reject);
@@ -523,43 +535,40 @@ export default Ember.Controller.extend({
onDeleteTempTableSuccess : function(){
console.log("onDeleteTempTableSuccess");
this.popUploadProgressInfos();
- this.pushUploadProgressInfos("<li>Successfully inserted row. </li>");
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.successfullyDeletedTemporaryTable'));
this.onUploadSuccessfull();
},
onDeleteTempTableFailure : function(error){
console.log("onDeleteTempTableFailure");
this.setError(error);
- this.setError("You will have to manually delete the table " + this.get("databaseName") + "." + this.get("tempTableName"));
+ this.setError(this.formatMessage('hive.messages.manuallyDeleteTable',{databaseName:this.get('databaseName'), tableName: this.get("tempTableName")}));
},
createTableAndUploadFile : function(){
var self = this;
self.setError();
- self.showUploadModal();
self.createActualTable()
- .then(function(data){
- console.log("1. received data : ", data);
- return self.waitForCreateActualTable(data.jobId);
+ .then(function(job){
+ console.log("1. received job : ", job);
+ return self.waitForCreateActualTable(job.id);
},function(error){
- self.onCreateActualTableFailure(error);
console.log("Error occurred: ", error);
+ self.onCreateActualTableFailure(error);
throw error;
})
.then(function(data){
- console.log("2. received data : ", data);
self.onCreateActualTableSuccess(data);
return self.createTempTable(data);
},function(error){
if(!self.get('error')){
console.log("Error occurred: ", error);
- self.onCreateActualTableFailure(new Error("Server job for creation of actual table failed."));
+ self.onCreateActualTableFailure(error);
}
throw error;
})
- .then(function(data){
- console.log("3. received data : ", data);
- return self.waitForCreateTempTable(data.jobId);
+ .then(function(job){
+ return self.waitForCreateTempTable(job.id);
},function(error){
if(!self.get('error')){
console.log("Error occurred: ", error);
@@ -568,17 +577,15 @@ export default Ember.Controller.extend({
throw error;
})
.then(function(data){
- console.log("4. received data : ", data);
self.onCreateTempTableSuccess(data);
return self.uploadFile(data);
},function(error){
if(!self.get('error')){
console.log("Error occurred: ", error);
- return self.onCreateTempTableFailure(new Error("Server job for creation of temporary table failed."));
+ return self.onCreateTempTableFailure(error);
}
throw error;
}).then(function(data){
- console.log("4.5 received data : ", data);
return self.waitForUploadingFile(data);
},function(error){
if(!self.get('error')){
@@ -588,19 +595,17 @@ export default Ember.Controller.extend({
throw error;
})
.then(function(data){
- console.log("5. received data : ", data);
self.onUploadingFileSuccess(data);
return self.insertIntoTable(data);
},function(error){
if(!self.get('error')){
console.log("Error occurred: ", error);
- return self.onUploadingFileFailure(new Error("Server job for upload of file failed."));
+ return self.onUploadingFileFailure(error);
}
throw error;
})
- .then(function(data){
- console.log("6. received data : ", data);
- return self.waitForInsertIntoTable(data.jobId);
+ .then(function(job){
+ return self.waitForInsertIntoTable(job.id);
},function(error){
if(!self.get('error')){
console.log("Error occurred: ", error);
@@ -609,19 +614,17 @@ export default Ember.Controller.extend({
throw error;
})
.then(function(data){
- console.log("7. received data : ", data);
self.onInsertIntoTableSuccess(data);
return self.deleteTempTable(data);
},function(error){
if(!self.get('error')){
console.log("Error occurred: ", error);
- return self.onInsertIntoTableFailure(new Error("Server job for insert from temporary to actual table failed."));
+ return self.onInsertIntoTableFailure(error);
}
throw error;
})
- .then(function(data){
- console.log("8. received data : ", data);
- return self.waitForDeleteTempTable(data.jobId);
+ .then(function(job){
+ return self.waitForDeleteTempTable(job.id);
},function(error){
if(!self.get('error')){
console.log("Error occurred: ", error);
@@ -630,12 +633,11 @@ export default Ember.Controller.extend({
throw error;
})
.then(function(data){
- console.log("9. received data : ", data);
self.onDeleteTempTableSuccess(data);
},function(error){
if(!self.get('error')){
console.log("Error occurred: ", error);
- self.onDeleteTempTableFailure(new Error("Server job for deleting temporary table failed."));
+ self.onDeleteTempTableFailure(error);
}
throw error;
}).catch(function(error){
@@ -646,9 +648,30 @@ export default Ember.Controller.extend({
});
},
- validateColumns: function () {
+ validateInput: function (headers,tableName,databaseName,isFirstRowHeader) {
// throw exception if invalid.
+ if(!headers || headers.length == 0) throw new Error(this.translate('hive.errors.emptyHeaders'));
+
+ var regex = new RegExp(this.get("COLUMN_NAME_REGEX"),"g");
+
+ headers.forEach(function(column,index){
+ if( !column || !column.name ) throw new Error(this.translate('hive.errors.emptyColumnName'));
+ var matchArr = column.name.match(regex);
+ if(matchArr == null || matchArr.length != 1 ) throw new Error(this.translate('hive.errors.illegalColumnName',{ columnName : column.name, index : (index + 1)}));
+ },this);
+
+ if(!tableName) throw new Error(this.translate('hive.errors.emptyTableName', {tableNameField : this.translate('hive.ui.tableName')}));
+ var tableRegex = new RegExp(this.get("TABLE_NAME_REGEX"),"g");
+ var mArr = tableName.match(tableRegex);
+ if(mArr == null || mArr.length != 1 ) throw new Error(this.translate('hive.errors.illegalTableName', {tableNameField:this.translate('hive.ui.tableName'),tableName:tableName}) );
+
+ if(!databaseName) throw new Error(this.translate('hive.errors.emptyDatabase', {database:this.translate('hive.words.database')}));
+
+ if (null == isFirstRowHeader || typeof isFirstRowHeader === 'undefined') { //this can be true or false. so explicitly checking for null/ undefined.
+ throw new Error(this.translate('hive.errors.emptyIsFirstRow', {isFirstRowHeaderField:this.translate('hive.ui.isFirstRowHeader')}));
+ }
},
+
setError: function (error) {
if(error){
console.log("upload table error : ", error);
@@ -666,7 +689,7 @@ export default Ember.Controller.extend({
uploadTableFromHdfs : function(){
console.log("uploadTableFromHdfs called.");
if(!(this.get("inputFileTypeCSV") == true && this.get("isFirstRowHeader") == false) ){
- this.pushUploadProgressInfos("<li>Uploading file .... </li>");
+ this.pushUploadProgressInfos(this.formatMessage('hive.messages.uploadingFromHdfs'));
}
return this.get('uploader').uploadFromHDFS({
"isFirstRowHeader": this.get("isFirstRowHeader"),
@@ -688,7 +711,8 @@ export default Ember.Controller.extend({
onUploadSuccessfull: function (data) {
console.log("onUploadSuccessfull : ", data);
- this.get('notifyService').success("Uploaded Successfully", "Table " + this.get('tableName') + " created in database " + this.get("databaseName"));
+ this.get('notifyService').success(this.translate('hive.messages.successfullyUploadedTableHeader'),
+ this.translate('hive.messages.successfullyUploadedTableMessage' ,{tableName:this.get('tableName') ,databaseName:this.get("databaseName")}));
this.clearFields();
},
@@ -712,7 +736,6 @@ export default Ember.Controller.extend({
},
filesUploaded: function (files) {
console.log("upload-table.js : uploaded new files : ", files);
-
this.clearFields();
this.set('files', files);
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab362d06/contrib/views/hive-next/src/main/resources/ui/hive-web/app/initializers/i18n.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/initializers/i18n.js b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/initializers/i18n.js
index b3630c1..d2f6aaf 100644
--- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/initializers/i18n.js
+++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/initializers/i18n.js
@@ -244,9 +244,65 @@ TRANSLATIONS = {
hive: {
errors: {
'no.query': "No query to process.",
- 'emptyDatabase' : "Please select Database.",
- 'emptyTableName' : "Please enter tableName.",
- 'emptyIsFirstRow' : "Please select is First Row Header?"
+ 'emptyDatabase' : "Please select {{ database }}.",
+ 'emptyTableName' : "Please enter {{ tableNameField }}.",
+ 'illegalTableName':"Illegal {{ tableNameField }} : '{{ tableName }}'",
+ 'emptyIsFirstRow' : "{{isFirstRowHeaderField}} cannot be null.",
+ 'emptyHeaders':"Headers (containing column names) cannot be null.",
+ 'emptyColumnName':"Column name cannot be null.",
+ 'illegalColumnName':"Illegal column name : '{{columnName}}' in column number {{index}}",
+ },
+ messages : {
+ 'generatingPreview':"Generating Preview.",
+ 'startingToCreateActualTable' : "Starting to create Actual table",
+ 'waitingToCreateActualTable' : "Waiting for creation of Actual table",
+ 'successfullyCreatedActualTable' : "Successfully created Actual table.",
+ 'failedToCreateActualTable' : "Failed to create Actual table.",
+ 'startingToCreateTemporaryTable' : "Starting to create Temporary table.",
+ 'waitingToCreateTemporaryTable' : "Waiting for creation of Temporary table.",
+ 'successfullyCreatedTemporaryTable' : "Successfully created Temporary table.",
+ 'failedToCreateTemporaryTable' : "Failed to create temporary table.",
+ 'deletingTable' : "Deleting {{table}} table.",
+ 'succesfullyDeletedTable' : "Successfully deleted {{ table}} table.",
+ 'failedToDeleteTable' : "Failed to delete {{table}} table.",
+ 'startingToUploadFile' : "Starting to upload the file.",
+ 'waitingToUploadFile' : "Waiting for uploading file.",
+ 'successfullyUploadedFile' : "Successfully uploaded file.",
+ 'failedToUploadFile' : "Failed to upload file.",
+ 'startingToInsertRows' : "Starting to insert rows from temporary table to actual table.",
+ 'waitingToInsertRows' : "Waiting for insertion of rows from temporary table to actual table.",
+ 'successfullyInsertedRows' : "Successfully inserted rows from temporary table to actual table.",
+ 'failedToInsertRows' : "Failed to insert rows from temporary table to actual table.",
+ 'startingToDeleteTemporaryTable' : "Starting to delete temporary table.",
+ 'waitingToDeleteTemporaryTable' : "Waiting for deletion of temporary table.",
+ 'successfullyDeletedTemporaryTable' : "Successfully deleted temporary table.",
+ 'manuallyDeleteTable' : "You will have to manually delete the table {{databaseName}}.{{tableName}}",
+ 'uploadingFromHdfs' : "Uploading file from HDFS.",
+ 'successfullyUploadedTableMessage' : "Table {{tableName}} created in database {{databaseName}}",
+ 'successfullyUploadedTableHeader' : "Uploaded Successfully"
+ },
+ words :{
+ temporary : "Temporary",
+ actual : "Actual",
+ database : "Database",
+ },
+ ui : {
+ 'uploadProgress' : "Upload Progress",
+ 'uploadFromLocal':"Upload from Local",
+ 'uploadFromHdfs':"Upload from HDFS",
+ 'selectFileType':"Select File Type",
+ 'fileType':"File type",
+ 'selectFromLocal':"Select from local",
+ 'hdfsPath':"HDFS Path",
+ 'selectDatabase':"Select a Database",
+ 'tableName':"Table name",
+ 'tableNameErrorMessage':"Only alphanumeric and underscore characters are allowed in table name.",
+ 'tableNameTooltip':"Enter valid (alphanumeric + underscore) table name.",
+ 'storedAs':"Stored as",
+ 'isFirstRowHeader':"Is first row header ?",
+ 'columnNameTooltip':"Enter valid (alphanumeric + underscore) column name.",
+ 'columnNameErrorMessage':"Only alphanumeric and underscore characters are allowed in column names.",
+
}
},
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab362d06/contrib/views/hive-next/src/main/resources/ui/hive-web/app/routes/history.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/routes/history.js b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/routes/history.js
index 848bee8..0aa3d41 100644
--- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/routes/history.js
+++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/routes/history.js
@@ -34,13 +34,6 @@ export default Ember.Route.extend({
if (!model) {
return;
}
-
- var filteredModel = model.filter(function (job) {
- //filter out jobs with referrer type of sample, explain and visual explain
- return (!job.get('referrer') || job.get('referrer') === constants.jobReferrer.job) &&
- !!job.get('id');
- });
-
- controller.set('history', filteredModel);
+ controller.set('history', model);
}
});
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab362d06/contrib/views/hive-next/src/main/resources/ui/hive-web/app/services/job.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/services/job.js b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/services/job.js
index 6cb4170..7ba0601 100644
--- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/services/job.js
+++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/services/job.js
@@ -31,7 +31,7 @@ export default Ember.Service.extend({
url: url,
type: 'DELETE',
headers: {
- 'X-Requested-By': 'ambari',
+ 'X-Requested-By': 'ambari'
},
success: function () {
job.reload();
@@ -39,11 +39,11 @@ export default Ember.Service.extend({
});
},
- fetchJobStatus: function (jobId) {
- console.log("finding status of job : ", jobId);
+ fetchJob : function (jobId){
+ console.log("fetching job : ", jobId);
var self = this;
var url = this.container.lookup('adapter:application').buildURL();
- url += "/jobs/" + jobId + "/status";
+ url += "/jobs/" + jobId ;
return Ember.$.ajax({
url: url,
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab362d06/contrib/views/hive-next/src/main/resources/ui/hive-web/app/styles/app.scss
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/styles/app.scss b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/styles/app.scss
index 8f60595..803d9b7 100644
--- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/styles/app.scss
+++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/styles/app.scss
@@ -678,3 +678,7 @@ td.data-upload-form-field {
table.no-border, table.no-border tr, table.no-border tr td {
border: none;
}
+
+.red-border {
+ border-color :red;
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab362d06/contrib/views/hive-next/src/main/resources/ui/hive-web/app/templates/components/validated-text-field.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/templates/components/validated-text-field.hbs b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/templates/components/validated-text-field.hbs
new file mode 100644
index 0000000..7cf0fcf
--- /dev/null
+++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/templates/components/validated-text-field.hbs
@@ -0,0 +1,23 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements. See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership. The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+{{!
+* see example in validated-text-field.js component file
+}}
+
+{{input class=inputClass value=inputValue title=message placeholder=placeholder}}