Posted to commits@ambari.apache.org by nc...@apache.org on 2017/01/18 15:58:17 UTC

[44/50] [abbrv] ambari git commit: AMBARI-19584 : hive view 2.0 added REST endpoint to enable and fetch table and column statistics (nitirajrathore)

AMBARI-19584 : hive view 2.0 added REST endpoint to enable and fetch table and column statistics (nitirajrathore)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/189fae52
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/189fae52
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/189fae52

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 189fae52f11a975bb814884a844d55fedd23a51d
Parents: 1523386
Author: Nitiraj Rathore <ni...@gmail.com>
Authored: Wed Jan 18 13:19:07 2017 +0530
Committer: Nitiraj Rathore <ni...@gmail.com>
Committed: Wed Jan 18 13:23:31 2017 +0530

----------------------------------------------------------------------
 .../view/hive20/internal/dto/ColumnInfo.java    |   1 -
 .../view/hive20/internal/dto/ColumnStats.java   | 170 +++++++++++++
 .../view/hive20/internal/dto/TableMeta.java     |   9 +
 .../view/hive20/internal/dto/TableStats.java    |  88 +++++++
 .../internal/parsers/TableMetaParserImpl.java   |  41 ++-
 .../generators/AnalyzeTableQueryGenerator.java  |  40 +++
 .../FetchColumnStatsQueryGenerator.java         |  40 +++
 .../view/hive20/resources/browser/DDLProxy.java | 226 +++++++++++------
 .../hive20/resources/browser/DDLService.java    |  60 +++++
 .../view/hive20/resources/jobs/JobService.java  |  28 +--
 .../jobs/ResultsPaginationController.java       | 251 ++++++++++++++-----
 .../hive20/resources/jobs/viewJobs/JobImpl.java |   4 +
 .../rest/postman/hive20.postman_collection.json | 128 +++++++++-
 13 files changed, 920 insertions(+), 166 deletions(-)
----------------------------------------------------------------------
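
Usage note (a hypothetical sketch, not part of this commit): the new endpoints are meant to be called in sequence. PUT .../analyze computes the statistics, GET .../column/{c}/stats starts the async "DESCRIBE FORMATTED" job, and GET .../column/{c}/fetch_stats?job_id=... reads the parsed result. The Java 11 client below illustrates that flow; the base URL, credentials, database/table/column names, job id, and the class name ColumnStatsFlow are all assumptions taken from the Postman collection at the bottom of this diff.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.Base64;

public class ColumnStatsFlow {
  public static void main(String[] args) throws Exception {
    String base = "http://localhost:8080/api/v1/views/HIVE/..."; // {{APP_BASE_URL}}, assumed
    String auth = "Basic " + Base64.getEncoder().encodeToString("admin:admin".getBytes());
    HttpClient client = HttpClient.newHttpClient();

    // 1. Compute table (and column) statistics.
    send(client, auth, "PUT", base + "/resources/ddl/databases/default/tables/tt1/analyze?analyze_columns=true");

    // 2. Start the async "DESCRIBE FORMATTED default.tt1.i" job; the JSON
    //    response carries the job id to poll.
    send(client, auth, "GET", base + "/resources/ddl/databases/default/tables/tt1/column/i/stats");

    // 3. Once the job has succeeded, fetch the parsed ColumnStats.
    send(client, auth, "GET", base + "/resources/ddl/databases/default/tables/tt1/column/i/fetch_stats?job_id=255");
  }

  private static void send(HttpClient client, String auth, String method, String url) throws Exception {
    HttpRequest request = HttpRequest.newBuilder(URI.create(url))
        .header("X-Requested-By", "ambari")
        .header("Authorization", auth)
        .method(method, HttpRequest.BodyPublishers.noBody())
        .build();
    HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
    System.out.println(method + " " + url + " -> " + response.statusCode());
    System.out.println(response.body());
  }
}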


http://git-wip-us.apache.org/repos/asf/ambari/blob/189fae52/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/ColumnInfo.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/ColumnInfo.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/ColumnInfo.java
index 2876348..44c82a0 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/ColumnInfo.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/ColumnInfo.java
@@ -25,7 +25,6 @@ import org.apache.commons.lang3.builder.EqualsBuilder;
  */
 public class ColumnInfo {
   private String name;
-  // TODO : to be broken into datatype + precision + scale for better comparison
   private String type;
   private Integer precision;
   private Integer scale;

http://git-wip-us.apache.org/repos/asf/ambari/blob/189fae52/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/ColumnStats.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/ColumnStats.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/ColumnStats.java
new file mode 100644
index 0000000..190ecd3
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/ColumnStats.java
@@ -0,0 +1,170 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.ambari.view.hive20.internal.dto;
+
+public class ColumnStats {
+  public static final String COLUMN_NAME = "# col_name";
+  public static final String DATA_TYPE = "data_type";
+  public static final String MIN = "min";
+  public static final String MAX = "max";
+  public static final String NUM_NULLS = "num_nulls";
+  public static final String DISTINCT_COUNT = "distinct_count";
+  public static final String AVG_COL_LEN = "avg_col_len";
+  public static final String MAX_COL_LEN = "max_col_len";
+  public static final String NUM_TRUES = "num_trues";
+  public static final String NUM_FALSES = "num_falses";
+  public static final String COMMENT = "comment";
+
+  private String databaseName;
+  private String tableName;
+  private String columnName;
+  private String dataType;
+  private String min;
+  private String max;
+  private String numNulls;
+  private String distinctCount;
+  private String avgColLen;
+  private String maxColLen;
+  private String numTrues;
+  private String numFalse;
+  private String comment;
+
+  public String getDatabaseName() {
+    return databaseName;
+  }
+
+  public void setDatabaseName(String databaseName) {
+    this.databaseName = databaseName;
+  }
+
+  public String getTableName() {
+    return tableName;
+  }
+
+  public void setTableName(String tableName) {
+    this.tableName = tableName;
+  }
+
+  public String getColumnName() {
+    return columnName;
+  }
+
+  public void setColumnName(String columnName) {
+    this.columnName = columnName;
+  }
+
+  public String getDataType() {
+    return dataType;
+  }
+
+  public void setDataType(String dataType) {
+    this.dataType = dataType;
+  }
+
+  public String getMin() {
+    return min;
+  }
+
+  public void setMin(String min) {
+    this.min = min;
+  }
+
+  public String getMax() {
+    return max;
+  }
+
+  public void setMax(String max) {
+    this.max = max;
+  }
+
+  public String getNumNulls() {
+    return numNulls;
+  }
+
+  public void setNumNulls(String numNulls) {
+    this.numNulls = numNulls;
+  }
+
+  public String getDistinctCount() {
+    return distinctCount;
+  }
+
+  public void setDistinctCount(String distinctCount) {
+    this.distinctCount = distinctCount;
+  }
+
+  public String getAvgColLen() {
+    return avgColLen;
+  }
+
+  public void setAvgColLen(String avgColLen) {
+    this.avgColLen = avgColLen;
+  }
+
+  public String getMaxColLen() {
+    return maxColLen;
+  }
+
+  public void setMaxColLen(String maxColLen) {
+    this.maxColLen = maxColLen;
+  }
+
+  public String getNumTrues() {
+    return numTrues;
+  }
+
+  public void setNumTrues(String numTrues) {
+    this.numTrues = numTrues;
+  }
+
+  public String getNumFalse() {
+    return numFalse;
+  }
+
+  public void setNumFalse(String numFalse) {
+    this.numFalse = numFalse;
+  }
+
+  public String getComment() {
+    return comment;
+  }
+
+  public void setComment(String comment) {
+    this.comment = comment;
+  }
+
+  @Override
+  public String toString() {
+    final StringBuilder sb = new StringBuilder("ColumnStats{");
+    sb.append("tableName='").append(tableName).append('\'');
+    sb.append(", columnName='").append(columnName).append('\'');
+    sb.append(", dataType='").append(dataType).append('\'');
+    sb.append(", min='").append(min).append('\'');
+    sb.append(", max='").append(max).append('\'');
+    sb.append(", numNulls='").append(numNulls).append('\'');
+    sb.append(", distinctCount='").append(distinctCount).append('\'');
+    sb.append(", avgColLen='").append(avgColLen).append('\'');
+    sb.append(", maxColLen='").append(maxColLen).append('\'');
+    sb.append(", numTrues='").append(numTrues).append('\'');
+    sb.append(", numFalse='").append(numFalse).append('\'');
+    sb.append(", comment='").append(comment).append('\'');
+    sb.append('}');
+    return sb.toString();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/189fae52/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableMeta.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableMeta.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableMeta.java
index f47e76c..861d132 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableMeta.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableMeta.java
@@ -32,6 +32,7 @@ public class TableMeta implements Serializable{
   private String ddl;
   private PartitionInfo partitionInfo;
   private DetailedTableInfo detailedInfo;
+  private TableStats tableStats;
   private StorageInfo storageInfo;
   private ViewInfo viewInfo;
 
@@ -107,6 +108,14 @@ public class TableMeta implements Serializable{
     this.viewInfo = viewInfo;
   }
 
+  public TableStats getTableStats() {
+    return tableStats;
+  }
+
+  public void setTableStats(TableStats tableStats) {
+    this.tableStats = tableStats;
+  }
+
   @Override
   public String toString() {
     final StringBuilder sb = new StringBuilder("TableMeta{");

http://git-wip-us.apache.org/repos/asf/ambari/blob/189fae52/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableStats.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableStats.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableStats.java
new file mode 100644
index 0000000..b8b4f07
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/dto/TableStats.java
@@ -0,0 +1,88 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.ambari.view.hive20.internal.dto;
+
+/**
+ * This will be returned as a part of TableMeta when table info is fetched.
+ * It holds the part of DetailedTableInfo that contains statistics-related data.
+ */
+public class TableStats {
+  public static final String NUM_FILES = "numFiles";
+  public static final String COLUMN_STATS_ACCURATE = "COLUMN_STATS_ACCURATE";
+  public static final String RAW_DATA_SIZE = "rawDataSize";
+  public static final String TOTAL_SIZE = "totalSize";
+
+  private Boolean isTableStatsEnabled;
+  private Integer numFiles;
+  private String columnStatsAccurate;
+  private Integer rawDataSize;
+  private Integer totalSize;
+
+  public Boolean getTableStatsEnabled() {
+    return isTableStatsEnabled;
+  }
+
+  public void setTableStatsEnabled(Boolean tableStatsEnabled) {
+    isTableStatsEnabled = tableStatsEnabled;
+  }
+
+  public Integer getNumFiles() {
+    return numFiles;
+  }
+
+  public void setNumFiles(Integer numFiles) {
+    this.numFiles = numFiles;
+  }
+
+  public String getColumnStatsAccurate() {
+    return columnStatsAccurate;
+  }
+
+  public void setColumnStatsAccurate(String columnStatsAccurate) {
+    this.columnStatsAccurate = columnStatsAccurate;
+  }
+
+  public Integer getRawDataSize() {
+    return rawDataSize;
+  }
+
+  public void setRawDataSize(Integer rawDataSize) {
+    this.rawDataSize = rawDataSize;
+  }
+
+  public Integer getTotalSize() {
+    return totalSize;
+  }
+
+  public void setTotalSize(Integer totalSize) {
+    this.totalSize = totalSize;
+  }
+
+  @Override
+  public String toString() {
+    final StringBuilder sb = new StringBuilder("TableStats{");
+    sb.append("isStatsEnabled='").append(isTableStatsEnabled).append('\'');
+    sb.append(", numFiles='").append(numFiles).append('\'');
+    sb.append(", columnStatsAccurate='").append(columnStatsAccurate).append('\'');
+    sb.append(", rawDataSize='").append(rawDataSize).append('\'');
+    sb.append(", totalSize='").append(totalSize).append('\'');
+    sb.append('}');
+    return sb.toString();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/189fae52/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/TableMetaParserImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/TableMetaParserImpl.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/TableMetaParserImpl.java
index 5cae34a..b0c9fe4 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/TableMetaParserImpl.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/parsers/TableMetaParserImpl.java
@@ -24,7 +24,9 @@ import org.apache.ambari.view.hive20.internal.dto.DetailedTableInfo;
 import org.apache.ambari.view.hive20.internal.dto.PartitionInfo;
 import org.apache.ambari.view.hive20.internal.dto.StorageInfo;
 import org.apache.ambari.view.hive20.internal.dto.TableMeta;
+import org.apache.ambari.view.hive20.internal.dto.TableStats;
 import org.apache.ambari.view.hive20.internal.dto.ViewInfo;
+import org.apache.parquet.Strings;
 
 import javax.inject.Inject;
 import java.util.List;
@@ -52,12 +54,11 @@ public class TableMetaParserImpl implements TableMetaParser<TableMeta> {
   @Inject
   private ViewInfoParser viewInfoParser;
 
-
-
   @Override
   public TableMeta parse(String database, String table, List<Row> createTableStatementRows, List<Row> describeFormattedRows) {
     String createTableStatement = createTableStatementParser.parse(createTableStatementRows);
     DetailedTableInfo tableInfo = detailedTableInfoParser.parse(describeFormattedRows);
+    TableStats tableStats = getTableStats(tableInfo);
     StorageInfo storageInfo = storageInfoParser.parse(describeFormattedRows);
     List<ColumnInfo> columns = columnInfoParser.parse(describeFormattedRows);
     PartitionInfo partitionInfo = partitionInfoParser.parse(describeFormattedRows);
@@ -74,6 +75,42 @@ public class TableMetaParserImpl implements TableMetaParser<TableMeta> {
     meta.setDetailedInfo(tableInfo);
     meta.setStorageInfo(storageInfo);
     meta.setViewInfo(viewInfo);
+    meta.setTableStats(tableStats);
     return meta;
   }
+
+  private TableStats getTableStats(DetailedTableInfo tableInfo) {
+    TableStats tableStats = new TableStats();
+    tableStats.setTableStatsEnabled(false);
+
+    String numFiles = tableInfo.getParameters().get(TableStats.NUM_FILES);
+    tableInfo.getParameters().remove(TableStats.NUM_FILES);
+
+    String columnStatsAccurate = tableInfo.getParameters().get(TableStats.COLUMN_STATS_ACCURATE);
+    tableInfo.getParameters().remove(TableStats.COLUMN_STATS_ACCURATE);
+
+    String rawDataSize = tableInfo.getParameters().get(TableStats.RAW_DATA_SIZE);
+    tableInfo.getParameters().remove(TableStats.RAW_DATA_SIZE);
+
+    String totalSize = tableInfo.getParameters().get(TableStats.TOTAL_SIZE);
+    tableInfo.getParameters().remove(TableStats.TOTAL_SIZE);
+
+    if(!Strings.isNullOrEmpty(numFiles) && !Strings.isNullOrEmpty(numFiles.trim())){
+      tableStats.setTableStatsEnabled(true);
+      tableStats.setNumFiles(Integer.valueOf(numFiles.trim()));
+    }
+
+    if(!Strings.isNullOrEmpty(rawDataSize) && !Strings.isNullOrEmpty(rawDataSize.trim())){
+      tableStats.setTableStatsEnabled(true);
+      tableStats.setRawDataSize(Integer.valueOf(rawDataSize.trim()));
+    }
+
+    if(!Strings.isNullOrEmpty(totalSize) && !Strings.isNullOrEmpty(totalSize.trim())){
+      tableStats.setTableStatsEnabled(true);
+      tableStats.setTotalSize(Integer.valueOf(totalSize.trim()));
+    }
+
+    tableStats.setColumnStatsAccurate(columnStatsAccurate);
+    return tableStats;
+  }
 }
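
For illustration, getTableStats() above treats table statistics as enabled as soon as any of numFiles/rawDataSize/totalSize is present among the DESCRIBE FORMATTED parameters, and removes those keys from the detailed-info map so they are reported only once. A minimal standalone sketch of the same rule follows; the class name and sample values are invented, and the map is shaped like DetailedTableInfoParser output.

import java.util.HashMap;
import java.util.Map;

public class TableStatsExtractionDemo {
  public static void main(String[] args) {
    Map<String, String> parameters = new HashMap<>();
    parameters.put("numFiles", "1");
    parameters.put("COLUMN_STATS_ACCURATE", "{\"BASIC_STATS\":\"true\"}");
    parameters.put("rawDataSize", "288");
    parameters.put("totalSize", "312");

    boolean statsEnabled = false;
    Integer numFiles = null;
    // remove() both reads the value and strips it from the parameter map.
    String numFilesRaw = parameters.remove("numFiles");
    if (numFilesRaw != null && !numFilesRaw.trim().isEmpty()) {
      statsEnabled = true;
      numFiles = Integer.valueOf(numFilesRaw.trim());
    }
    // rawDataSize and totalSize follow the identical pattern.
    System.out.println("enabled=" + statsEnabled + ", numFiles=" + numFiles);
  }
}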

http://git-wip-us.apache.org/repos/asf/ambari/blob/189fae52/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/AnalyzeTableQueryGenerator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/AnalyzeTableQueryGenerator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/AnalyzeTableQueryGenerator.java
new file mode 100644
index 0000000..902d959
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/AnalyzeTableQueryGenerator.java
@@ -0,0 +1,40 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.ambari.view.hive20.internal.query.generators;
+
+import com.google.common.base.Optional;
+import org.apache.ambari.view.hive20.exceptions.ServiceException;
+
+public class AnalyzeTableQueryGenerator implements QueryGenerator {
+  private final String databaseName;
+  private final String tableName;
+  private final Boolean shouldAnalyzeColumns;
+
+  public AnalyzeTableQueryGenerator(String databaseName, String tableName, Boolean shouldAnalyzeColumns) {
+    this.databaseName = databaseName;
+    this.tableName = tableName;
+    this.shouldAnalyzeColumns = shouldAnalyzeColumns;
+  }
+
+  @Override
+  public Optional<String> getQuery() throws ServiceException {
+    return Optional.of("ANALYZE TABLE " + "`" + databaseName + "." + tableName + "`" + " COMPUTE STATISTICS " +
+      (shouldAnalyzeColumns? " FOR COLUMNS ": "") + ";");
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/189fae52/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/FetchColumnStatsQueryGenerator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/FetchColumnStatsQueryGenerator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/FetchColumnStatsQueryGenerator.java
new file mode 100644
index 0000000..73b3698
--- /dev/null
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/FetchColumnStatsQueryGenerator.java
@@ -0,0 +1,40 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.ambari.view.hive20.internal.query.generators;
+
+import com.google.common.base.Optional;
+import org.apache.ambari.view.hive20.exceptions.ServiceException;
+
+public class FetchColumnStatsQueryGenerator implements QueryGenerator{
+  private final String databaseName;
+  private final String tableName;
+  private final String columnName;
+
+  public FetchColumnStatsQueryGenerator(String databaseName, String tableName, String columnName) {
+    this.databaseName = databaseName;
+    this.tableName = tableName;
+    this.columnName = columnName;
+  }
+
+  @Override
+  public Optional<String> getQuery() throws ServiceException {
+    return Optional.of("DESCRIBE FORMATTED " + "`" + this.databaseName + "." + this.tableName +  "." + this.columnName +
+      "`" );
+  }
+}
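
A minimal smoke test of the two generators above (not part of the commit; the class name and the default/tt1/i values are invented), with the expected strings derived from the concatenation in each getQuery():

import com.google.common.base.Optional;
import org.apache.ambari.view.hive20.exceptions.ServiceException;
import org.apache.ambari.view.hive20.internal.query.generators.AnalyzeTableQueryGenerator;
import org.apache.ambari.view.hive20.internal.query.generators.FetchColumnStatsQueryGenerator;

public class StatsQueryGeneratorDemo {
  public static void main(String[] args) throws ServiceException {
    Optional<String> analyze =
        new AnalyzeTableQueryGenerator("default", "tt1", true).getQuery();
    // Prints (note the doubled spaces from the concatenation):
    // ANALYZE TABLE `default.tt1` COMPUTE STATISTICS  FOR COLUMNS ;
    System.out.println(analyze.get());

    Optional<String> describe =
        new FetchColumnStatsQueryGenerator("default", "tt1", "i").getQuery();
    // Prints: DESCRIBE FORMATTED `default.tt1.i`
    System.out.println(describe.get());
  }
}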

http://git-wip-us.apache.org/repos/asf/ambari/blob/189fae52/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java
index 8d995dd..7210c75 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java
@@ -32,8 +32,10 @@ import org.apache.ambari.view.hive20.actor.DatabaseManager;
 import org.apache.ambari.view.hive20.client.ConnectionConfig;
 import org.apache.ambari.view.hive20.client.DDLDelegator;
 import org.apache.ambari.view.hive20.client.DDLDelegatorImpl;
+import org.apache.ambari.view.hive20.client.HiveClientException;
 import org.apache.ambari.view.hive20.client.Row;
 import org.apache.ambari.view.hive20.exceptions.ServiceException;
+import org.apache.ambari.view.hive20.internal.dto.ColumnStats;
 import org.apache.ambari.view.hive20.internal.dto.DatabaseInfo;
 import org.apache.ambari.view.hive20.internal.dto.DatabaseResponse;
 import org.apache.ambari.view.hive20.internal.dto.TableInfo;
@@ -41,11 +43,14 @@ import org.apache.ambari.view.hive20.internal.dto.TableMeta;
 import org.apache.ambari.view.hive20.internal.dto.TableResponse;
 import org.apache.ambari.view.hive20.internal.parsers.TableMetaParserImpl;
 import org.apache.ambari.view.hive20.internal.query.generators.AlterTableQueryGenerator;
+import org.apache.ambari.view.hive20.internal.query.generators.AnalyzeTableQueryGenerator;
 import org.apache.ambari.view.hive20.internal.query.generators.CreateTableQueryGenerator;
 import org.apache.ambari.view.hive20.internal.query.generators.DeleteDatabaseQueryGenerator;
 import org.apache.ambari.view.hive20.internal.query.generators.DeleteTableQueryGenerator;
+import org.apache.ambari.view.hive20.internal.query.generators.FetchColumnStatsQueryGenerator;
 import org.apache.ambari.view.hive20.internal.query.generators.RenameTableQueryGenerator;
 import org.apache.ambari.view.hive20.resources.jobs.JobServiceInternal;
+import org.apache.ambari.view.hive20.resources.jobs.ResultsPaginationController;
 import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
 import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobController;
 import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobImpl;
@@ -116,6 +121,20 @@ public class DDLProxy {
     return transformToTableResponse(tableOptional.get(), databaseName);
   }
 
+  public Job getColumnStatsJob(final String databaseName, final String tableName, final String columnName,
+                         JobResourceManager resourceManager) throws ServiceException {
+    FetchColumnStatsQueryGenerator queryGenerator = new FetchColumnStatsQueryGenerator(databaseName, tableName,
+      columnName);
+    Optional<String> q = queryGenerator.getQuery();
+    String jobTitle = "Fetch column stats for " + databaseName + "." + tableName + "." + columnName;
+    if(q.isPresent()) {
+      String query = q.get();
+      return createJob(databaseName, query, jobTitle, resourceManager);
+    }else{
+      throw new ServiceException("Failed to generate job for {}" + jobTitle);
+    }
+  }
+
   public TableMeta getTableProperties(ViewContext context, ConnectionConfig connectionConfig, String databaseName, String tableName) {
     DDLDelegator delegator = new DDLDelegatorImpl(context, ConnectionSystem.getInstance().getActorSystem(), ConnectionSystem.getInstance().getOperationController(context));
     List<Row> createTableStatementRows = delegator.getTableCreateStatement(connectionConfig, databaseName, tableName);
@@ -222,40 +241,14 @@ public class DDLProxy {
 
   public Job createTable(String databaseName, TableMeta tableMeta, JobResourceManager resourceManager) throws ServiceException {
     String createTableQuery = this.generateCreateTableDDL(databaseName, tableMeta);
-    Map jobInfo = new HashMap<>();
-    jobInfo.put("title", "Create table " + tableMeta.getDatabase() + "." + tableMeta.getTable());
-    jobInfo.put("forcedContent", createTableQuery);
-    jobInfo.put("dataBase", databaseName);
-
-    try {
-      Job job = new JobImpl(jobInfo);
-      JobController createdJobController = new JobServiceInternal().createJob(job, resourceManager);
-      Job returnableJob = createdJobController.getJobPOJO();
-      LOG.info("returning job with id {} for create table {}", returnableJob.getId(), tableMeta.getTable());
-      return returnableJob;
-    } catch (Throwable e) {
-      LOG.error("Exception occurred while creating the table for create Query : {}", createTableQuery, e);
-      throw new ServiceException(e);
-    }
+    String jobTitle = "Create table " + tableMeta.getDatabase() + "." + tableMeta.getTable();
+    return createJob(databaseName, createTableQuery, jobTitle, resourceManager);
   }
 
   public Job deleteTable(String databaseName, String tableName, JobResourceManager resourceManager) throws ServiceException {
     String deleteTableQuery = generateDeleteTableDDL(databaseName, tableName);
-    Map jobInfo = new HashMap<>();
-    jobInfo.put("title", "Delete table " + databaseName + "." + tableName);
-    jobInfo.put("forcedContent", deleteTableQuery);
-    jobInfo.put("dataBase", databaseName);
-
-    try {
-      Job job = new JobImpl(jobInfo);
-      JobController createdJobController = new JobServiceInternal().createJob(job, resourceManager);
-      Job returnableJob = createdJobController.getJobPOJO();
-      LOG.info("returning job with id {} for the deletion of table : {}", returnableJob.getId(), tableName);
-      return returnableJob;
-    } catch (Throwable e) {
-      LOG.error("Exception occurred while deleting the table for delete Query : {}", deleteTableQuery, e);
-      throw new ServiceException(e);
-    }
+    String jobTitle = "Delete table " + databaseName + "." + tableName;
+    return createJob(databaseName, deleteTableQuery, jobTitle, resourceManager);
   }
 
   public String generateDeleteTableDDL(String databaseName, String tableName) throws ServiceException {
@@ -270,21 +263,8 @@ public class DDLProxy {
 
   public Job alterTable(ViewContext context, ConnectionConfig hiveConnectionConfig, String databaseName, String oldTableName, TableMeta newTableMeta, JobResourceManager resourceManager) throws ServiceException {
     String alterQuery = generateAlterTableQuery(context, hiveConnectionConfig, databaseName, oldTableName, newTableMeta);
-    Map jobInfo = new HashMap<>();
-    jobInfo.put("title", "Alter table " + databaseName + "." + oldTableName);
-    jobInfo.put("forcedContent", alterQuery);
-    jobInfo.put("dataBase", databaseName);
-
-    try {
-      Job job = new JobImpl(jobInfo);
-      JobController createdJobController = new JobServiceInternal().createJob(job, resourceManager);
-      Job returnableJob = createdJobController.getJobPOJO();
-      LOG.info("returning job with id {} for alter table {}", returnableJob.getId(), oldTableName);
-      return returnableJob;
-    } catch (Throwable e) {
-      LOG.error("Exception occurred while creating the table for create Query : {}", alterQuery, e);
-      throw new ServiceException(e);
-    }
+    String jobTitle = "Alter table " + databaseName + "." + oldTableName;
+    return createJob(databaseName, alterQuery, jobTitle, resourceManager);
   }
 
   public String generateAlterTableQuery(ViewContext context, ConnectionConfig hiveConnectionConfig, String databaseName, String oldTableName, TableMeta newTableMeta) throws ServiceException {
@@ -310,22 +290,9 @@ public class DDLProxy {
     Optional<String> renameTable = queryGenerator.getQuery();
     if(renameTable.isPresent()) {
       String renameQuery = renameTable.get();
-      LOG.info("Creating job for : {}", renameQuery);
-      Map jobInfo = new HashMap<>();
-      jobInfo.put("title", "Rename table " + oldDatabaseName + "." + oldTableName + " to " + newDatabaseName + "." + newTableName);
-      jobInfo.put("forcedContent", renameQuery);
-      jobInfo.put("dataBase", oldDatabaseName);
-
-      try {
-        Job job = new JobImpl(jobInfo);
-        JobController createdJobController = new JobServiceInternal().createJob(job, resourceManager);
-        Job returnableJob = createdJobController.getJobPOJO();
-        LOG.info("returning job with id {} for rename table {}", returnableJob.getId(), oldTableName);
-        return returnableJob;
-      } catch (Throwable e) {
-        LOG.error("Exception occurred while renaming the table for rename Query : {}", renameQuery, e);
-        throw new ServiceException(e);
-      }
+      String jobTitle = "Rename table " + oldDatabaseName + "." + oldTableName + " to " + newDatabaseName + "." +
+        newTableName;
+      return createJob(oldDatabaseName, renameQuery, jobTitle, resourceManager);
     }else{
       throw new ServiceException("Failed to generate rename table query for table " + oldDatabaseName + "." +
         oldTableName);
@@ -337,24 +304,129 @@ public class DDLProxy {
     Optional<String> deleteDatabase = queryGenerator.getQuery();
     if(deleteDatabase.isPresent()) {
       String deleteQuery = deleteDatabase.get();
-      LOG.info("Creating job for : {}", deleteQuery );
-      Map jobInfo = new HashMap<>();
-      jobInfo.put("title", "Delete database " + databaseName);
-      jobInfo.put("forcedContent", deleteQuery);
-      jobInfo.put("dataBase", databaseName);
-
-      try {
-        Job job = new JobImpl(jobInfo);
-        JobController createdJobController = new JobServiceInternal().createJob(job, resourceManager);
-        Job returnableJob = createdJobController.getJobPOJO();
-        LOG.info("returning job with id {} for deleting database {}", returnableJob.getId(), databaseName);
-        return returnableJob;
-      } catch (Throwable e) {
-        LOG.error("Exception occurred while renaming the table for rename Query : {}", deleteQuery, e);
-        throw new ServiceException(e);
-      }
+      return createJob(databaseName, deleteQuery, "Delete database " + databaseName, resourceManager);
     }else{
       throw new ServiceException("Failed to generate delete database query for database " + databaseName);
     }
   }
+
+  public Job createJob(String databaseName, String query, String jobTitle, JobResourceManager resourceManager)
+    throws ServiceException {
+    LOG.info("Creating job for : {}", query);
+    Map jobInfo = new HashMap<>();
+    jobInfo.put("title", jobTitle);
+    jobInfo.put("forcedContent", query);
+    jobInfo.put("dataBase", databaseName);
+    jobInfo.put("referrer", JobImpl.REFERRER.INTERNAL.name());
+
+    try {
+      Job job = new JobImpl(jobInfo);
+      JobController createdJobController = new JobServiceInternal().createJob(job, resourceManager);
+      Job returnableJob = createdJobController.getJobPOJO();
+      LOG.info("returning job with id {} for {}", returnableJob.getId(), jobTitle);
+      return returnableJob;
+    } catch (Throwable e) {
+      LOG.error("Exception occurred while {} : {}", jobTitle, deleteQuery, e);
+      throw new ServiceException(e);
+    }
+  }
+
+  public Job analyzeTable(String databaseName, String tableName, Boolean shouldAnalyzeColumns, JobResourceManager resourceManager) throws ServiceException {
+    AnalyzeTableQueryGenerator queryGenerator = new AnalyzeTableQueryGenerator(databaseName, tableName, shouldAnalyzeColumns);
+    Optional<String> analyzeTable = queryGenerator.getQuery();
+    String jobTitle = "Analyze table " + databaseName + "." + tableName;
+    if(analyzeTable.isPresent()) {
+      String query = analyzeTable.get();
+      return createJob(databaseName, query, jobTitle, resourceManager);
+    }else{
+      throw new ServiceException("Failed to generate job for {}" + jobTitle);
+    }
+  }
+
+  public ColumnStats fetchColumnStats(String columnName, String jobId, ViewContext context) throws ServiceException {
+    try {
+      ResultsPaginationController.ResultsResponse results = ResultsPaginationController.getResult(jobId, null, null, null, null, context);
+      if(results.getHasResults()){
+        List<String[]> rows = results.getRows();
+        Map<Integer, String> headerMap = new HashMap<>();
+        boolean header = true;
+        for(String[] row : rows){
+          if(header){
+            for(int i = 0 ; i < row.length; i++){
+              if(!Strings.isNullOrEmpty(row[i])){
+                headerMap.put(i, row[i].trim());
+              }
+            }
+            header = false;
+          }
+          else if(row.length > 0 ){
+            if(columnName.equals(row[0])){ // the first column of the row contains column name
+              return createColumnStats(row, headerMap);
+            }
+          }
+        }
+      }else{
+        throw new ServiceException("Cannot find any result for this jobId: " + jobId);
+      }
+    } catch (HiveClientException e) {
+      LOG.error("Exception occurred while fetching results for column statistics with jobId: {}", jobId, e);
+      throw new ServiceException(e);
+    }
+
+    LOG.error("Column stats not found in the fetched results.");
+    throw new ServiceException("Could not find the column stats in the result.");
+  }
+
+  /**
+   * order of values in array
+   *  row [# col_name, data_type, min, max, num_nulls, distinct_count, avg_col_len, max_col_len,num_trues,num_falses,comment]
+   * indexes : 0           1        2    3     4             5               6             7           8         9    10
+   * @param row
+   * @param headerMap
+   * @return
+   */
+  private ColumnStats createColumnStats(String[] row, Map<Integer, String> headerMap) throws ServiceException {
+    if(null == row){
+      throw new ServiceException("row cannot be null.");
+    }
+    ColumnStats columnStats = new ColumnStats();
+    for(int i = 0 ; i < row.length; i++){
+      String headerName = headerMap.get(i);
+      if (headerName == null) {
+        continue; // no header captured for this column index
+      }
+      switch(headerName){
+        case ColumnStats.COLUMN_NAME:
+          columnStats.setColumnName(row[i]);
+          break;
+        case ColumnStats.DATA_TYPE:
+          columnStats.setDataType(row[i]);
+          break;
+        case ColumnStats.MIN:
+          columnStats.setMin(row[i]);
+          break;
+        case ColumnStats.MAX:
+          columnStats.setMax(row[i]);
+          break;
+        case ColumnStats.NUM_NULLS:
+          columnStats.setNumNulls(row[i]);
+          break;
+        case ColumnStats.DISTINCT_COUNT:
+          columnStats.setDistinctCount(row[i]);
+          break;
+        case ColumnStats.AVG_COL_LEN:
+          columnStats.setAvgColLen(row[i]);
+          break;
+        case ColumnStats.MAX_COL_LEN:
+          columnStats.setMaxColLen(row[i]);
+          break;
+        case ColumnStats.NUM_TRUES:
+          columnStats.setNumTrues(row[i]);
+          break;
+        case ColumnStats.NUM_FALSES:
+          columnStats.setNumFalse(row[i]);
+          break;
+        case ColumnStats.COMMENT:
+          columnStats.setComment(row[i]);
+          break;
+      }
+    }
+
+    return columnStats;
+  }
 }
\ No newline at end of file
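
A standalone sketch (invented class and sample rows) of the header-map technique used by fetchColumnStats()/createColumnStats() above: the first row of the "DESCRIBE FORMATTED db.table.column" result names the columns, and every later row is matched to those names by index.

import java.util.HashMap;
import java.util.Map;

public class HeaderMapDemo {
  public static void main(String[] args) {
    String[] headerRow = {"# col_name", "data_type", "min", "max", "num_nulls"};
    String[] dataRow   = {"i", "int", "1", "45", "0"};

    // Build an index -> header-name map from the first row.
    Map<Integer, String> headerMap = new HashMap<>();
    for (int i = 0; i < headerRow.length; i++) {
      if (headerRow[i] != null && !headerRow[i].trim().isEmpty()) {
        headerMap.put(i, headerRow[i].trim());
      }
    }
    // Resolve each cell of a data row through the header map.
    for (int i = 0; i < dataRow.length; i++) {
      System.out.println(headerMap.get(i) + " = " + dataRow[i]);
    }
  }
}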

http://git-wip-us.apache.org/repos/asf/ambari/blob/189fae52/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLService.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLService.java
index e142baf..5c955a2 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLService.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLService.java
@@ -21,6 +21,7 @@ package org.apache.ambari.view.hive20.resources.browser;
 import org.apache.ambari.view.hive20.BaseService;
 import org.apache.ambari.view.hive20.client.ConnectionConfig;
 import org.apache.ambari.view.hive20.exceptions.ServiceException;
+import org.apache.ambari.view.hive20.internal.dto.ColumnStats;
 import org.apache.ambari.view.hive20.internal.dto.DatabaseResponse;
 import org.apache.ambari.view.hive20.internal.dto.TableMeta;
 import org.apache.ambari.view.hive20.internal.dto.TableResponse;
@@ -28,6 +29,7 @@ import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
 import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobResourceManager;
 import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
 import org.apache.ambari.view.hive20.utils.SharedObjectsFactory;
+import org.apache.parquet.Strings;
 import org.json.simple.JSONObject;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -155,6 +157,28 @@ public class DDLService extends BaseService {
     }
   }
 
+  @PUT
+  @Path("databases/{database_id}/tables/{table_id}/analyze")
+  @Produces(MediaType.APPLICATION_JSON)
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response analyzeTable(@PathParam("database_id") String databaseName, @PathParam("table_id") String tableName,
+                              @QueryParam("analyze_columns") String analyzeColumns) {
+    Boolean shouldAnalyzeColumns = Boolean.FALSE;
+    if(!Strings.isNullOrEmpty(analyzeColumns)){
+      shouldAnalyzeColumns = Boolean.valueOf(analyzeColumns.trim());
+    }
+    try {
+      Job job = proxy.analyzeTable(databaseName, tableName, shouldAnalyzeColumns, getResourceManager());
+      JSONObject response = new JSONObject();
+      response.put("job", job);
+      return Response.status(Response.Status.ACCEPTED).entity(response).build();
+    } catch (ServiceException e) {
+      LOG.error("Exception occurred while analyzing table for database {}, table: {}, analyzeColumns: {}" ,
+        databaseName, tableName, analyzeColumns, e);
+      throw new ServiceFormattedException(e);
+    }
+  }
+
   @POST
   @Path("databases/{database_id}/tables/ddl")
   @Produces(MediaType.APPLICATION_JSON)
@@ -241,6 +265,42 @@ public class DDLService extends BaseService {
     return Response.ok(response).build();
   }
 
+  @GET
+  @Path("databases/{database_id}/tables/{table_id}/column/{column_id}/stats")
+  @Produces(MediaType.APPLICATION_JSON)
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response getColumnStats(@PathParam("database_id") String databaseName, @PathParam("table_id") String tableName,
+                            @PathParam("column_id") String columnName) {
+    try {
+      Job job = proxy.getColumnStatsJob(databaseName, tableName, columnName, getResourceManager());
+      JSONObject response = new JSONObject();
+      response.put("job", job);
+      return Response.status(Response.Status.ACCEPTED).entity(response).build();
+    } catch (ServiceException e) {
+      LOG.error("Exception occurred while fetching column stats", databaseName, tableName, e);
+      throw new ServiceFormattedException(e);
+    }
+  }
+
+  @GET
+  @Path("databases/{database_id}/tables/{table_id}/column/{column_id}/fetch_stats")
+  @Produces(MediaType.APPLICATION_JSON)
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response fetchColumnStats(@PathParam("database_id") String databaseName, @PathParam("table_id") String
+    tableName, @PathParam("column_id") String columnName, @QueryParam("job_id") String jobId) {
+    try {
+      ColumnStats columnStats = proxy.fetchColumnStats(columnName, jobId, context);
+      columnStats.setTableName(tableName);
+      columnStats.setDatabaseName(databaseName);
+      JSONObject response = new JSONObject();
+      response.put("columnStats", columnStats);
+      return Response.status(Response.Status.ACCEPTED).entity(response).build();
+    } catch (ServiceException e) {
+      LOG.error("Exception occurred while fetching column stats for column: {} and jobId: {}", columnName, jobId,  e);
+      throw new ServiceFormattedException(e);
+    }
+  }
+
   public static class DDL {
     String query;
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/189fae52/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/JobService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/JobService.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/JobService.java
index 675ea37..71cedd1 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/JobService.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/JobService.java
@@ -30,11 +30,8 @@ import org.apache.ambari.view.hive20.backgroundjobs.BackgroundJobController;
 import org.apache.ambari.view.hive20.client.AsyncJobRunner;
 import org.apache.ambari.view.hive20.client.AsyncJobRunnerImpl;
 import org.apache.ambari.view.hive20.client.ColumnDescription;
-import org.apache.ambari.view.hive20.client.Cursor;
-import org.apache.ambari.view.hive20.client.EmptyCursor;
 import org.apache.ambari.view.hive20.client.HiveClientException;
 import org.apache.ambari.view.hive20.client.NonPersistentCursor;
-import org.apache.ambari.view.hive20.client.Row;
 import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive20.resources.jobs.atsJobs.IATSParser;
 import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
@@ -79,7 +76,6 @@ import java.lang.reflect.InvocationTargetException;
 import java.sql.SQLException;
 import java.util.List;
 import java.util.Map;
-import java.util.concurrent.Callable;
 
 /**
  * Servlet for queries
@@ -362,29 +358,7 @@ public class JobService extends BaseService {
                              @QueryParam("columns") final String requestedColumns) {
     try {
 
-      final String username = context.getUsername();
-
-      ConnectionSystem system = ConnectionSystem.getInstance();
-      final AsyncJobRunner asyncJobRunner = new AsyncJobRunnerImpl(context, system.getOperationController(context), system.getActorSystem());
-
-      return ResultsPaginationController.getInstance(context)
-              .request(jobId, searchId, true, fromBeginning, count, format,requestedColumns,
-                      new Callable<Cursor< Row, ColumnDescription >>() {
-                        @Override
-                        public Cursor call() throws Exception {
-                          Optional<NonPersistentCursor> cursor;
-                          if(fromBeginning != null && fromBeginning.equals("true")){
-                            cursor = asyncJobRunner.resetAndGetCursor(jobId, username);
-                          }
-                          else {
-                            cursor = asyncJobRunner.getCursor(jobId, username);
-                          }
-                          if(cursor.isPresent())
-                          return cursor.get();
-                          else
-                            return new EmptyCursor();
-                        }
-                      }).build();
+      return ResultsPaginationController.getResultAsResponse(jobId, fromBeginning, count, searchId, format, requestedColumns, context);
 
     } catch (WebApplicationException ex) {
       throw ex;

http://git-wip-us.apache.org/repos/asf/ambari/blob/189fae52/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/ResultsPaginationController.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/ResultsPaginationController.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/ResultsPaginationController.java
index 6efa2a9..e9b6d81 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/ResultsPaginationController.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/ResultsPaginationController.java
@@ -20,20 +20,26 @@ package org.apache.ambari.view.hive20.resources.jobs;
 
 
 import com.google.common.base.Function;
+import com.google.common.base.Optional;
+import com.google.common.base.Strings;
 import com.google.common.collect.FluentIterable;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.ConnectionSystem;
+import org.apache.ambari.view.hive20.client.AsyncJobRunner;
+import org.apache.ambari.view.hive20.client.AsyncJobRunnerImpl;
 import org.apache.ambari.view.hive20.client.ColumnDescription;
 import org.apache.ambari.view.hive20.client.Cursor;
+import org.apache.ambari.view.hive20.client.EmptyCursor;
 import org.apache.ambari.view.hive20.client.HiveClientException;
+import org.apache.ambari.view.hive20.client.NonPersistentCursor;
 import org.apache.ambari.view.hive20.client.Row;
 import org.apache.ambari.view.hive20.utils.BadRequestFormattedException;
 import org.apache.ambari.view.hive20.utils.ResultFetchFormattedException;
 import org.apache.ambari.view.hive20.utils.ResultNotReadyFormattedException;
 import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
 import org.apache.commons.collections4.map.PassiveExpiringMap;
-import org.apache.hadoop.hbase.util.Strings;
 
 import javax.ws.rs.core.Response;
 import java.util.ArrayList;
@@ -65,6 +71,49 @@ public class ResultsPaginationController {
   private static final int DEFAULT_FETCH_COUNT = 50;
   private Map<String, Cursor<Row, ColumnDescription>> resultsCache;
 
+  public static Response getResultAsResponse(final String jobId, final String fromBeginning, Integer count, String searchId, String format, String requestedColumns, ViewContext context) throws HiveClientException {
+    final String username = context.getUsername();
+
+    ConnectionSystem system = ConnectionSystem.getInstance();
+    final AsyncJobRunner asyncJobRunner = new AsyncJobRunnerImpl(context, system.getOperationController(context), system.getActorSystem());
+
+    return getInstance(context)
+            .request(jobId, searchId, true, fromBeginning, count, format, requestedColumns,
+              createCallableMakeResultSets(jobId, fromBeginning, username, asyncJobRunner)).build();
+  }
+
+  public static ResultsResponse getResult(final String jobId, final String fromBeginning, Integer count, String
+    searchId, String requestedColumns, ViewContext context) throws HiveClientException {
+    final String username = context.getUsername();
+
+    ConnectionSystem system = ConnectionSystem.getInstance();
+    final AsyncJobRunner asyncJobRunner = new AsyncJobRunnerImpl(context, system.getOperationController(context), system.getActorSystem());
+
+    return getInstance(context)
+            .fetchResult(jobId, searchId, true, fromBeginning, count, requestedColumns,
+              createCallableMakeResultSets(jobId, fromBeginning, username, asyncJobRunner));
+  }
+
+  private static Callable<Cursor<Row, ColumnDescription>> createCallableMakeResultSets(final String jobId, final String
+    fromBeginning, final String username, final AsyncJobRunner asyncJobRunner) {
+    return new Callable<Cursor< Row, ColumnDescription >>() {
+      @Override
+      public Cursor call() throws Exception {
+        Optional<NonPersistentCursor> cursor;
+        if(fromBeginning != null && fromBeginning.equals("true")){
+          cursor = asyncJobRunner.resetAndGetCursor(jobId, username);
+        }
+        else {
+          cursor = asyncJobRunner.getCursor(jobId, username);
+        }
+        if (cursor.isPresent()) {
+          return cursor.get();
+        } else {
+          return new EmptyCursor();
+        }
+      }
+    };
+  }
+
   public static class CustomTimeToLiveExpirationPolicy extends PassiveExpiringMap.ConstantTimeToLiveExpirationPolicy<String, Cursor<Row, ColumnDescription>> {
     public CustomTimeToLiveExpirationPolicy(long timeToLiveMillis) {
       super(timeToLiveMillis);
@@ -125,72 +174,85 @@ public class ResultsPaginationController {
     return getResultsCache().get(key);
   }
 
-  public Response.ResponseBuilder request(String key, String searchId, boolean canExpire, String fromBeginning, Integer count, String format, String requestedColumns, Callable<Cursor<Row, ColumnDescription>> makeResultsSet) throws HiveClientException {
-    if (searchId == null)
-      searchId = DEFAULT_SEARCH_ID;
-    key = key + "?" + searchId;
-    if (!canExpire)
-      key = "$" + key;
-    if (fromBeginning != null && fromBeginning.equals("true") && getResultsCache().containsKey(key)) {
-
-      getResultsCache().remove(key);
-    }
-
-    Cursor<Row, ColumnDescription> resultSet = getResultsSet(key, makeResultsSet);
-
-    if (count == null)
-      count = DEFAULT_FETCH_COUNT;
-
-    List<ColumnDescription> allschema = resultSet.getDescriptions();
-    List<Row> allRowEntries = FluentIterable.from(resultSet)
-      .limit(count).toList();
+  /**
+   * Returns the results in the standard format.
+   * @param key
+   * @param searchId
+   * @param canExpire
+   * @param fromBeginning
+   * @param count
+   * @param requestedColumns
+   * @param makeResultsSet
+   * @return
+   * @throws HiveClientException
+   */
+  public ResultsResponse fetchResult(String key, String searchId, boolean canExpire, String fromBeginning, Integer
+    count, String requestedColumns, Callable<Cursor<Row, ColumnDescription>> makeResultsSet) throws HiveClientException {
 
-    List<ColumnDescription> schema = allschema;
+    ResultProcessor resultProcessor = new ResultProcessor(key, searchId, canExpire, fromBeginning, count, requestedColumns, makeResultsSet).invoke();
+    List<Object[]> rows = resultProcessor.getRows();
+    List<ColumnDescription> schema = resultProcessor.getSchema();
+    Cursor<Row, ColumnDescription> resultSet = resultProcessor.getResultSet();
 
-    final Set<Integer> selectedColumns = getRequestedColumns(requestedColumns);
-    if (!selectedColumns.isEmpty()) {
-      schema = filter(allschema, selectedColumns);
-    }
+    int read = rows.size();
+    return getResultsResponse(rows, schema, resultSet, read);
+  }
 
-    List<Object[]> rows = FluentIterable.from(allRowEntries)
-      .transform(new Function<Row, Object[]>() {
-        @Override
-        public Object[] apply(Row input) {
-          if(!selectedColumns.isEmpty()) {
-            return filter(Lists.newArrayList(input.getRow()), selectedColumns).toArray();
-          } else {
-            return input.getRow();
-          }
-        }
-      }).toList();
+  /**
+   * Returns the results in either D3 format or standard format, wrapped inside a ResponseBuilder object.
+   * @param key
+   * @param searchId
+   * @param canExpire
+   * @param fromBeginning
+   * @param count : number of rows to fetch
+   * @param format : 'd3' or empty
+   * @param requestedColumns
+   * @param makeResultsSet
+   * @return
+   * @throws HiveClientException
+   */
+  public Response.ResponseBuilder request(String key, String searchId, boolean canExpire, String fromBeginning, Integer count, String format, String requestedColumns, Callable<Cursor<Row, ColumnDescription>> makeResultsSet) throws HiveClientException {
+    ResultProcessor resultProcessor = new ResultProcessor(key, searchId, canExpire, fromBeginning, count, requestedColumns, makeResultsSet).invoke();
+    List<Object[]> rows = resultProcessor.getRows();
+    List<ColumnDescription> schema = resultProcessor.getSchema();
+    Cursor<Row, ColumnDescription> resultSet = resultProcessor.getResultSet();
 
     int read = rows.size();
     if(format != null && format.equalsIgnoreCase("d3")) {
-      List<Map<String,Object>> results = new ArrayList<>();
-      for(int i=0; i<rows.size(); i++) {
-        Object[] row = rows.get(i);
-        Map<String, Object> keyValue = new HashMap<>(row.length);
-        for(int j=0; j<row.length; j++) {
-          //Replace dots in schema with underscore
-          String schemaName = schema.get(j).getName();
-          keyValue.put(schemaName.replace('.','_'), row[j]);
-        }
-        results.add(keyValue);
-      }
+      List<Map<String, Object>> results = getD3FormattedResult(rows, schema);
       return Response.ok(results);
     } else {
-      ResultsResponse resultsResponse = new ResultsResponse();
-      resultsResponse.setSchema(schema);
-      resultsResponse.setRows(rows);
-      resultsResponse.setReadCount(read);
-      resultsResponse.setHasNext(resultSet.hasNext());
-      //      resultsResponse.setSize(resultSet.size());
-      resultsResponse.setOffset(resultSet.getOffset());
-      resultsResponse.setHasResults(true);
+      ResultsResponse resultsResponse = getResultsResponse(rows, schema, resultSet, read);
       return Response.ok(resultsResponse);
     }
   }
 
+  public List<Map<String, Object>> getD3FormattedResult(List<Object[]> rows, List<ColumnDescription> schema) {
+    List<Map<String,Object>> results = new ArrayList<>();
+    for(int i=0; i<rows.size(); i++) {
+      Object[] row = rows.get(i);
+      Map<String, Object> keyValue = new HashMap<>(row.length);
+      for(int j=0; j<row.length; j++) {
+        //Replace dots in schema with underscore
+        String schemaName = schema.get(j).getName();
+        keyValue.put(schemaName.replace('.','_'), row[j]);
+      }
+      results.add(keyValue);
+    }
+    return results;
+  }
+
+  public ResultsResponse getResultsResponse(List<Object[]> rows, List<ColumnDescription> schema, Cursor<Row, ColumnDescription> resultSet, int read) {
+    ResultsResponse resultsResponse = new ResultsResponse();
+    resultsResponse.setSchema(schema);
+    resultsResponse.setRows(rows);
+    resultsResponse.setReadCount(read);
+    resultsResponse.setHasNext(resultSet.hasNext());
+    //      resultsResponse.setSize(resultSet.size());
+    resultsResponse.setOffset(resultSet.getOffset());
+    resultsResponse.setHasResults(true);
+    return resultsResponse;
+  }
+
   private <T> List<T> filter(List<T> list, Set<Integer> selectedColumns) {
     List<T> filtered = Lists.newArrayList();
     for(int i: selectedColumns) {
@@ -202,7 +264,7 @@ public class ResultsPaginationController {
   }
 
   private Set<Integer> getRequestedColumns(String requestedColumns) {
-    if(Strings.isEmpty(requestedColumns)) {
+    if(Strings.isNullOrEmpty(requestedColumns)) {
       return new HashSet<>();
     }
     Set<Integer> selectedColumns = Sets.newHashSet();
@@ -216,7 +278,7 @@ public class ResultsPaginationController {
     return selectedColumns;
   }
 
-  private static class ResultsResponse {
+  public static class ResultsResponse {
     private List<ColumnDescription> schema;
     private List<String[]> rows;
     private int readCount;
@@ -283,4 +345,79 @@ public class ResultsPaginationController {
       this.hasResults = hasResults;
     }
   }
+
+  private class ResultProcessor {
+    private String key;
+    private String searchId;
+    private boolean canExpire;
+    private String fromBeginning;
+    private Integer count;
+    private String requestedColumns;
+    private Callable<Cursor<Row, ColumnDescription>> makeResultsSet;
+    private Cursor<Row, ColumnDescription> resultSet;
+    private List<ColumnDescription> schema;
+    private List<Object[]> rows;
+
+    public ResultProcessor(String key, String searchId, boolean canExpire, String fromBeginning, Integer count, String requestedColumns, Callable<Cursor<Row, ColumnDescription>> makeResultsSet) {
+      this.key = key;
+      this.searchId = searchId;
+      this.canExpire = canExpire;
+      this.fromBeginning = fromBeginning;
+      this.count = count;
+      this.requestedColumns = requestedColumns;
+      this.makeResultsSet = makeResultsSet;
+    }
+
+    public Cursor<Row, ColumnDescription> getResultSet() {
+      return resultSet;
+    }
+
+    public List<ColumnDescription> getSchema() {
+      return schema;
+    }
+
+    public List<Object[]> getRows() {
+      return rows;
+    }
+
+    public ResultProcessor invoke() {
+      if (searchId == null)
+        searchId = DEFAULT_SEARCH_ID;
+      key = key + "?" + searchId;
+      if (!canExpire)
+        key = "$" + key;
+      if (fromBeginning != null && fromBeginning.equals("true") && getResultsCache().containsKey(key)) {
+        getResultsCache().remove(key);
+      }
+
+      resultSet = getResultsSet(key, makeResultsSet);
+
+      if (count == null)
+        count = DEFAULT_FETCH_COUNT;
+
+      List<ColumnDescription> allschema = resultSet.getDescriptions();
+      List<Row> allRowEntries = FluentIterable.from(resultSet)
+        .limit(count).toList();
+
+      schema = allschema;
+
+      final Set<Integer> selectedColumns = getRequestedColumns(requestedColumns);
+      if (!selectedColumns.isEmpty()) {
+        schema = filter(allschema, selectedColumns);
+      }
+
+      rows = FluentIterable.from(allRowEntries)
+        .transform(new Function<Row, Object[]>() {
+          @Override
+          public Object[] apply(Row input) {
+            if (!selectedColumns.isEmpty()) {
+              return filter(Lists.newArrayList(input.getRow()), selectedColumns).toArray();
+            } else {
+              return input.getRow();
+            }
+          }
+        }).toList();
+      return this;
+    }
+  }
 }
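
The refactor above splits result shaping into getD3FormattedResult() and getResultsResponse(). A standalone illustration (invented class, schema, and rows) of the D3 rule, where each row becomes a name -> value map and dots in column names become underscores:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class D3FormatDemo {
  public static void main(String[] args) {
    String[] schema = {"tt1.i", "tt1.j"};
    List<Object[]> rows = new ArrayList<>();
    rows.add(new Object[]{1, "a"});

    List<Map<String, Object>> results = new ArrayList<>();
    for (Object[] row : rows) {
      Map<String, Object> keyValue = new HashMap<>(row.length);
      for (int j = 0; j < row.length; j++) {
        // Replace dots in schema names with underscores for D3/JS friendliness.
        keyValue.put(schema[j].replace('.', '_'), row[j]);
      }
      results.add(keyValue);
    }
    System.out.println(results); // e.g. [{tt1_i=1, tt1_j=a}]
  }
}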

http://git-wip-us.apache.org/repos/asf/ambari/blob/189fae52/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobImpl.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobImpl.java
index 85ffaf2..abb395d 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobImpl.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/jobs/viewJobs/JobImpl.java
@@ -28,6 +28,10 @@ import java.util.Map;
  * Bean to represent saved query
  */
 public class JobImpl implements Job {
+  public enum REFERRER {
+    INTERNAL,
+    USER
+  }
   private String title = null;
   private String queryFile = null;
   private String statusDir = null;

http://git-wip-us.apache.org/repos/asf/ambari/blob/189fae52/contrib/views/hive20/src/test/rest/postman/hive20.postman_collection.json
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/rest/postman/hive20.postman_collection.json b/contrib/views/hive20/src/test/rest/postman/hive20.postman_collection.json
index d674944..4f78b59 100644
--- a/contrib/views/hive20/src/test/rest/postman/hive20.postman_collection.json
+++ b/contrib/views/hive20/src/test/rest/postman/hive20.postman_collection.json
@@ -19,7 +19,7 @@
 						"showPassword": false
 					}
 				},
-				"url": "{{APP_BASE_URL}}/resources/ddl/databases/d1/tables/t1/info?_=1481634018195",
+				"url": "{{APP_BASE_URL}}/resources/ddl/databases/default/tables/tt1/info?_=1481634018195",
 				"method": "GET",
 				"header": [
 					{
@@ -367,7 +367,7 @@
 						"showPassword": false
 					}
 				},
-				"url": "{{APP_BASE_URL}}/resources/jobs/202",
+				"url": "{{APP_BASE_URL}}/resources/jobs/257",
 				"method": "GET",
 				"header": [
 					{
@@ -437,6 +437,130 @@
 				"description": "drop database "
 			},
 			"response": []
+		},
+		{
+			"name": "fetch column stats",
+			"request": {
+				"auth": {
+					"type": "basic",
+					"basic": {
+						"username": "admin",
+						"password": "admin",
+						"saveHelperData": true,
+						"showPassword": false
+					}
+				},
+				"url": "{{APP_BASE_URL}}/resources/ddl/databases/default/tables/tt1/column/i/stats",
+				"method": "GET",
+				"header": [
+					{
+						"key": "X-Requested-By",
+						"value": "ambari",
+						"description": ""
+					},
+					{
+						"key": "Authorization",
+						"value": "Basic YWRtaW46YWRtaW4=",
+						"description": ""
+					}
+				],
+				"body": {},
+				"description": "fetch column stats"
+			},
+			"response": []
+		},
+		{
+			"name": "fetch job results",
+			"request": {
+				"auth": {
+					"type": "basic",
+					"basic": {
+						"username": "admin",
+						"password": "admin",
+						"saveHelperData": true,
+						"showPassword": false
+					}
+				},
+				"url": "{{APP_BASE_URL}}/resources/jobs/101/results?first=true&_=1484636273461",
+				"method": "GET",
+				"header": [
+					{
+						"key": "X-Requested-By",
+						"value": "ambari",
+						"description": ""
+					},
+					{
+						"key": "Authorization",
+						"value": "Basic YWRtaW46YWRtaW4=",
+						"description": ""
+					}
+				],
+				"body": {},
+				"description": "fetch job results"
+			},
+			"response": []
+		},
+		{
+			"name": "fetch column Stats result",
+			"request": {
+				"auth": {
+					"type": "basic",
+					"basic": {
+						"username": "admin",
+						"password": "admin",
+						"saveHelperData": true,
+						"showPassword": false
+					}
+				},
+				"url": "{{APP_BASE_URL}}/resources/ddl/databases/default/tables/tt1/column/i/fetch_stats?job_id=255",
+				"method": "GET",
+				"header": [
+					{
+						"key": "X-Requested-By",
+						"value": "ambari",
+						"description": ""
+					},
+					{
+						"key": "Authorization",
+						"value": "Basic YWRtaW46YWRtaW4=",
+						"description": ""
+					}
+				],
+				"body": {},
+				"description": "fetch column Stats result"
+			},
+			"response": []
+		},
+		{
+			"name": "analyze table",
+			"request": {
+				"auth": {
+					"type": "basic",
+					"basic": {
+						"username": "admin",
+						"password": "admin",
+						"saveHelperData": true,
+						"showPassword": false
+					}
+				},
+				"url": "{{APP_BASE_URL}}/resources/ddl/databases/default/tables/t1/analyze?analyze_columns=true",
+				"method": "PUT",
+				"header": [
+					{
+						"key": "X-Requested-By",
+						"value": "ambari",
+						"description": ""
+					},
+					{
+						"key": "Authorization",
+						"value": "Basic YWRtaW46YWRtaW4=",
+						"description": ""
+					}
+				],
+				"body": {},
+				"description": "analyze table"
+			},
+			"response": []
 		}
 	]
 }
\ No newline at end of file