Posted to commits@ambari.apache.org by pa...@apache.org on 2016/06/30 11:58:29 UTC

ambari git commit: AMBARI-17484: bringing in changes of AMBARI-16980: History tab takes long to populate when there are many entries in the history table. (Nitiraj Rathore via pallavkul)

Repository: ambari
Updated Branches:
  refs/heads/trunk 33d905062 -> ebaed6ad6


AMBARI-17484: bringing in changes of AMBARI-16980: History tab takes long to populate when there are many entries in the history table. (Nitiraj Rathore via pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ebaed6ad
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ebaed6ad
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ebaed6ad

Branch: refs/heads/trunk
Commit: ebaed6ad69e2d6b506403f2f09d1c3b576be740b
Parents: 33d9050
Author: Pallav Kulshreshtha <pa...@gmail.com>
Authored: Thu Jun 30 17:27:28 2016 +0530
Committer: Pallav Kulshreshtha <pa...@gmail.com>
Committed: Thu Jun 30 17:27:28 2016 +0530

----------------------------------------------------------------------
 .../hive2/persistence/utils/ItemNotFound.java   |  18 ++
 .../view/hive2/resources/jobs/Aggregator.java   | 194 +++++++++++++++---
 .../view/hive2/resources/jobs/JobService.java   |  43 +++-
 .../hive2/resources/jobs/atsJobs/ATSParser.java |  82 +++++++-
 .../jobs/atsJobs/ATSRequestsDelegate.java       |   6 +-
 .../jobs/atsJobs/ATSRequestsDelegateImpl.java   |  35 +++-
 .../resources/jobs/atsJobs/IATSParser.java      |   8 +-
 .../view/hive2/resources/jobs/viewJobs/Job.java |   4 +-
 .../jobs/viewJobs/JobControllerImpl.java        |  17 +-
 .../hive2/resources/jobs/viewJobs/JobImpl.java  |  28 +--
 .../hive2/resources/jobs/viewJobs/JobInfo.java  |  78 +++++++
 .../app/components/number-range-widget.js       |  15 +-
 .../ui/hive-web/app/controllers/history.js      | 201 ++++++++++++------
 .../ui/hive-web/app/initializers/i18n.js        |   3 +-
 .../resources/ui/hive-web/app/models/job.js     |   3 +-
 .../resources/ui/hive-web/app/routes/history.js |  16 +-
 .../ui/hive-web/app/services/history.js         | 204 +++++++++++++++++++
 .../ui/hive-web/app/templates/history.hbs       |  68 ++++---
 .../ui/hive-web/app/utils/constants.js          |   2 +-
 19 files changed, 843 insertions(+), 182 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ebaed6ad/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/persistence/utils/ItemNotFound.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/persistence/utils/ItemNotFound.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/persistence/utils/ItemNotFound.java
index ad2adce..2433e34 100644
--- a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/persistence/utils/ItemNotFound.java
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/persistence/utils/ItemNotFound.java
@@ -22,4 +22,22 @@ package org.apache.ambari.view.hive2.persistence.utils;
  * Thrown when item was not found in DB
  */
 public class ItemNotFound extends Exception {
+  public ItemNotFound() {
+  }
+
+  public ItemNotFound(String message) {
+    super(message);
+  }
+
+  public ItemNotFound(String message, Throwable cause) {
+    super(message, cause);
+  }
+
+  public ItemNotFound(Throwable cause) {
+    super(cause);
+  }
+
+  public ItemNotFound(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
+    super(message, cause, enableSuppression, writableStackTrace);
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/ebaed6ad/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/Aggregator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/Aggregator.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/Aggregator.java
index f184150..083423a 100644
--- a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/Aggregator.java
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/Aggregator.java
@@ -21,6 +21,7 @@ package org.apache.ambari.view.hive2.resources.jobs;
 import org.apache.ambari.view.hive2.persistence.utils.FilteringStrategy;
 import org.apache.ambari.view.hive2.persistence.utils.Indexed;
 import org.apache.ambari.view.hive2.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive2.persistence.utils.OnlyOwnersFilteringStrategy;
 import org.apache.ambari.view.hive2.resources.IResourceManager;
 import org.apache.ambari.view.hive2.resources.files.FileService;
 import org.apache.ambari.view.hive2.resources.jobs.atsJobs.HiveQueryId;
@@ -28,16 +29,15 @@ import org.apache.ambari.view.hive2.resources.jobs.atsJobs.IATSParser;
 import org.apache.ambari.view.hive2.resources.jobs.atsJobs.TezDagId;
 import org.apache.ambari.view.hive2.resources.jobs.viewJobs.Job;
 import org.apache.ambari.view.hive2.resources.jobs.viewJobs.JobImpl;
+import org.apache.ambari.view.hive2.resources.jobs.viewJobs.JobInfo;
 import org.apache.commons.beanutils.PropertyUtils;
-import org.apache.commons.codec.binary.Base64;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.lang.reflect.InvocationTargetException;
-import java.util.HashSet;
+import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
-import java.util.Set;
 
 /**
  * View Jobs and ATS Jobs aggregator.
@@ -70,36 +70,181 @@ public class Aggregator {
     this.ats = ats;
   }
 
+  /**
+   * Gets all the jobs for 'username' whose submission time is between 'startTime' (inclusive)
+   * and 'endTime' (exclusive).
+   * Fetches the jobs from ATS and the DB, merges them and updates the DB, then returns the combined list.
+   *
+   * @param username:  username for which jobs have to be fetched.
+   * @param startTime: inclusive, time in milliseconds since epoch
+   * @param endTime:   exclusive, time in milliseconds since epoch
+   * @return: list of jobs
+   */
+  public List<Job> readAllForUserByTime(String username, long startTime, long endTime) {
+    List<HiveQueryId> queryIdList = ats.getHiveQueryIdsForUserByTime(username, startTime, endTime);
+    List<Job> allJobs = fetchDagsAndMergeJobs(queryIdList);
+    List<Job> dbOnlyJobs = readDBOnlyJobs(username, queryIdList, startTime, endTime);
+    allJobs.addAll(dbOnlyJobs);
+
+    return allJobs;
+  }
+
+  /**
+   * Fetches the latest state of the given jobs from ATS and the DB, merging/updating as required.
+   * @param jobInfos: info of the jobs to fetch
+   * @return: list of updated jobs
+   */
+  public List<Job> readJobsByIds(List<JobInfo> jobInfos) {
+    //categorize jobs
+    List<String> jobsWithHiveIds = new LinkedList<>();
+    List<String> dbOnlyJobs = new LinkedList<>();
+
+    for (JobInfo jobInfo : jobInfos) {
+      if (null == jobInfo.getHiveId() || jobInfo.getHiveId().trim().isEmpty()) {
+        dbOnlyJobs.add(jobInfo.getJobId());
+      } else {
+        jobsWithHiveIds.add(jobInfo.getHiveId());
+      }
+    }
+
+    List<HiveQueryId> queryIdList = ats.getHiveQueryIdByEntityList(jobsWithHiveIds);
+    List<Job> allJobs = fetchDagsAndMergeJobs(queryIdList);
+    List<Job> dbJobs = readJobsFromDbByJobId(dbOnlyJobs);
+
+    allJobs.addAll(dbJobs);
+    return allJobs;
+  }
+
+  /**
+   * Gets the jobs from the database given their ids.
+   * @param jobsIds: list of job ids
+   * @return: list of all the jobs found
+   */
+  private List<Job> readJobsFromDbByJobId(List<String> jobsIds) {
+    List<Job> jobs = new LinkedList<>();
+    for (final String jid : jobsIds) {
+      try {
+        Job job = getJobFromDbByJobId(jid);
+        jobs.add(job);
+      } catch (ItemNotFound itemNotFound) {
+        LOG.error("Error while finding job with id : {}", jid, itemNotFound);
+      }
+    }
+
+    return jobs;
+  }
+
+  /**
+   * fetches the job from DB given its id
+   * @param jobId: the id of the job to fetch
+   * @return: the job
+   * @throws ItemNotFound: if no job with the given id is found in the DB
+   */
+  private Job getJobFromDbByJobId(final String jobId) throws ItemNotFound {
+    if (null == jobId)
+      return null;
+
+    List<Job> jobs = viewJobResourceManager.readAll(new FilteringStrategy() {
+      @Override
+      public boolean isConform(Indexed item) {
+        return item.getId().equals(jobId);
+      }
+
+      @Override
+      public String whereStatement() {
+        return "id = '" + jobId + "'"; // even IDs are string
+      }
+    });
+
+    if (null != jobs && !jobs.isEmpty())
+      return jobs.get(0);
+
+    throw new ItemNotFound(String.format("Job with id %s not found.", jobId));
+  }
+
+  /**
+   * Returns all the jobs from ATS and the DB (for this instance) for the given user.
+   * @param username: user whose jobs are fetched
+   * @return: combined list of jobs
+   */
   public List<Job> readAll(String username) {
-    Set<String> addedOperationIds = new HashSet<>();
+    List<HiveQueryId> queries = ats.getHiveQueryIdsForUser(username);
+    LOG.debug("HiveQueryIds fetched : {}", queries);
+    List<Job> allJobs = fetchDagsAndMergeJobs(queries);
+    List<Job> dbOnlyJobs = readDBOnlyJobs(username, queries, null, null);
+    LOG.debug("Jobs only present in DB: {}", dbOnlyJobs);
+    allJobs.addAll(dbOnlyJobs);
+    return allJobs;
+  }
 
+  /**
+   * Reads all the jobs from the DB for 'username', excluding the jobs mentioned in the queries list.
+   * @param username : username for which the jobs are to be read.
+   * @param queries : the jobs to exclude
+   * @param startTime: may be null; if set, the inclusive start of the submission-time window
+   * @param endTime: may be null; if set, the exclusive end of the submission-time window
+   * @return : the jobs in the DB that are not in the queries list
+   */
+  private List<Job> readDBOnlyJobs(String username, List<HiveQueryId> queries, Long startTime, Long endTime) {
+    List<Job> dbOnlyJobs = new LinkedList<>();
+    HashMap<String, String> operationIdVsHiveId = new HashMap<>();
+
+    for (HiveQueryId hqid : queries) {
+      operationIdVsHiveId.put(hqid.operationId, hqid.entity);
+    }
+    LOG.info("operationIdVsHiveId : {} ", operationIdVsHiveId);
+    //cover case when operationId is present, but not exists in ATS
+    //e.g. optimized queries without executing jobs, like "SELECT * FROM TABLE"
+    List<Job> jobs = viewJobResourceManager.readAll(new OnlyOwnersFilteringStrategy(username));
+    for (Job job : jobs) {
+      if (null != startTime && null != endTime && null != job.getDateSubmitted()
+        && (job.getDateSubmitted() < startTime || job.getDateSubmitted() >= endTime || operationIdVsHiveId.containsKey(job.getGuid()))
+        ) {
+        continue; // don't include this in the result
+      } else {
+        dbOnlyJobs.add(job);
+      }
+    }
+    return dbOnlyJobs;
+  }
+
+  private List<Job> fetchDagsAndMergeJobs(List<HiveQueryId> queries) {
     List<Job> allJobs = new LinkedList<Job>();
-    List<HiveQueryId> queries = ats.getHiveQueryIdsList(username);
-    for (HiveQueryId atsHiveQuery : queries) {
-      TezDagId atsTezDag = getTezDagFromHiveQueryId(atsHiveQuery);
 
-      JobImpl atsJob;
+    for (HiveQueryId atsHiveQuery : queries) {
+      JobImpl atsJob = null;
       if (hasOperationId(atsHiveQuery)) {
         try {
-          Job viewJob = getJobByOperationId(urlSafeBase64ToHexString(atsHiveQuery.operationId));
-          saveJobInfoIfNeeded(atsHiveQuery, atsTezDag, viewJob);
-
-          atsJob = mergeAtsJobWithViewJob(atsHiveQuery, atsTezDag, viewJob);
+          Job viewJob = getJobByOperationId(atsHiveQuery.operationId);
+          TezDagId atsTezDag = getTezDagFromHiveQueryId(atsHiveQuery);
+          atsJob = mergeHiveAtsTez(atsHiveQuery, atsTezDag, viewJob);
         } catch (ItemNotFound itemNotFound) {
-          // Executed from HS2, but outside of Hive View
-          atsJob = atsOnlyJob(atsHiveQuery, atsTezDag);
+          LOG.error("Ignore : {}", itemNotFound.getMessage());
+          continue;
         }
       } else {
+        TezDagId atsTezDag = getTezDagFromHiveQueryId(atsHiveQuery);
         atsJob = atsOnlyJob(atsHiveQuery, atsTezDag);
       }
-      allJobs.add(atsJob);
 
-      addedOperationIds.add(atsHiveQuery.operationId);
+      atsJob.setHiveQueryId(atsHiveQuery.entity);
+      allJobs.add(atsJob);
     }
 
     return allJobs;
   }
 
+  /**
+   * Saves the ATS/Tez info on the view job if needed and returns the merged job.
+   * @param atsHiveQuery: hive query id fetched from ATS
+   * @param atsTezDag: Tez DAG id fetched from ATS
+   * @param viewJob: the job stored by the view
+   * @return: the merged job
+   */
+  private JobImpl mergeHiveAtsTez(HiveQueryId atsHiveQuery, TezDagId atsTezDag, Job viewJob) throws ItemNotFound {
+    saveJobInfoIfNeeded(atsHiveQuery, atsTezDag, viewJob);
+    return mergeAtsJobWithViewJob(atsHiveQuery, atsTezDag, viewJob);
+  }
+
   public Job readATSJob(Job viewJob) throws ItemNotFound {
 
     if (viewJob.getStatus().equals(Job.JOB_STATE_INITIALIZED) || viewJob.getStatus().equals(Job.JOB_STATE_UNKNOWN))
@@ -192,7 +337,7 @@ public class Aggregator {
   }
 
   protected Job getJobByOperationId(final String opId) throws ItemNotFound {
-    List<Job> operationHandles = viewJobResourceManager.readAll(new FilteringStrategy() {
+    List<Job> jobs = viewJobResourceManager.readAll(new FilteringStrategy() {
       @Override
       public boolean isConform(Indexed item) {
         Job opHandle = (Job) item;
@@ -205,20 +350,9 @@ public class Aggregator {
       }
     });
 
-    if (operationHandles.size() != 1)
+    if (jobs.size() != 1)
       throw new ItemNotFound();
 
-    return viewJobResourceManager.read(operationHandles.get(0).getId());
-  }
-
-  protected static String urlSafeBase64ToHexString(String urlsafeBase64) {
-    byte[] decoded = Base64.decodeBase64(urlsafeBase64);
-
-    StringBuilder sb = new StringBuilder();
-    for (byte b : decoded) {
-      sb.append(String.format("%02x", b));
-    }
-    return sb.toString();
+    return jobs.get(0);
   }
-
 }
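
A rough sketch of how the new Aggregator entry points fit together (not part of the patch): the aggregator instance is assumed to be obtained the way JobService builds it via getAggregator(), and the ids and five-day window below are invented.

import org.apache.ambari.view.hive2.resources.jobs.Aggregator;
import org.apache.ambari.view.hive2.resources.jobs.viewJobs.Job;
import org.apache.ambari.view.hive2.resources.jobs.viewJobs.JobInfo;

import java.util.Arrays;
import java.util.List;

public class AggregatorUsageSketch {

  // Windowed read: ATS jobs merged with DB-only jobs submitted in [startTime, endTime).
  static List<Job> readRecentJobs(Aggregator aggregator, String username) {
    long endTime = System.currentTimeMillis();            // exclusive end of the window
    long startTime = endTime - 5L * 24 * 60 * 60 * 1000;  // inclusive start, five days back
    return aggregator.readAllForUserByTime(username, startTime, endTime);
  }

  // Targeted refresh: only the jobs the caller still considers live.
  static List<Job> refreshJobs(Aggregator aggregator) {
    List<JobInfo> toRefresh = Arrays.asList(
        new JobInfo("42", "hive_20160630_abc", null, null));  // hypothetical ids
    return aggregator.readJobsByIds(toRefresh);
  }
}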

http://git-wip-us.apache.org/repos/asf/ambari/blob/ebaed6ad/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/JobService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/JobService.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/JobService.java
index 3bc396d..53b91db 100644
--- a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/JobService.java
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/JobService.java
@@ -38,6 +38,7 @@ import org.apache.ambari.view.hive2.resources.jobs.atsJobs.IATSParser;
 import org.apache.ambari.view.hive2.resources.jobs.viewJobs.Job;
 import org.apache.ambari.view.hive2.resources.jobs.viewJobs.JobController;
 import org.apache.ambari.view.hive2.resources.jobs.viewJobs.JobImpl;
+import org.apache.ambari.view.hive2.resources.jobs.viewJobs.JobInfo;
 import org.apache.ambari.view.hive2.resources.jobs.viewJobs.JobResourceManager;
 import org.apache.ambari.view.hive2.utils.MisconfigurationFormattedException;
 import org.apache.ambari.view.hive2.utils.NotFoundFormattedException;
@@ -457,20 +458,50 @@ public class JobService extends BaseService {
    */
   @GET
   @Produces(MediaType.APPLICATION_JSON)
-  public Response getList() {
+  public List<Job> getList(@QueryParam("startTime") long startTime, @QueryParam("endTime") long endTime) {
     try {
-      LOG.debug("Getting all job");
-      List<Job> allJobs = getAggregator().readAll(context.getUsername());
+
+      LOG.debug("Getting all job: startTime: {}, endTime: {}",startTime,endTime);
+      List<Job> allJobs = getAggregator().readAllForUserByTime(context.getUsername(),startTime, endTime);
       for(Job job : allJobs) {
         job.setSessionTag(null);
       }
 
-      JSONObject object = new JSONObject();
-      object.put("jobs", allJobs);
-      return Response.ok(object).build();
+      LOG.info("allJobs : {}", allJobs);
+      return allJobs;
+    } catch (WebApplicationException ex) {
+      LOG.error("Exception occured while fetching all jobs.", ex);
+      throw ex;
+    } catch (Exception ex) {
+      LOG.error("Exception occured while fetching all jobs.", ex);
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Fetches the jobs for the given job infos.
+   * Provide as much info about each job as possible so that the API can optimize the fetch process.
+   * @param jobInfos: infos of the jobs to fetch
+   * @return: list of jobs
+   */
+  @Path("/getList")
+  @POST
+  @Produces(MediaType.APPLICATION_JSON)
+  @Consumes(MediaType.APPLICATION_JSON)
+  public List<Job> getList(List<JobInfo> jobInfos) {
+    try {
+      LOG.debug("fetching jobs with ids :{}", jobInfos);
+      List<Job> allJobs = getAggregator().readJobsByIds(jobInfos);
+      for(Job job : allJobs) {
+        job.setSessionTag(null);
+      }
+
+      return allJobs;
     } catch (WebApplicationException ex) {
+      LOG.error("Exception occured while fetching all jobs.", ex);
       throw ex;
     } catch (Exception ex) {
+      LOG.error("Exception occured while fetching all jobs.", ex);
       throw new ServiceFormattedException(ex.getMessage(), ex);
     }
   }
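
The two endpoints above can be exercised with any HTTP client; a minimal JAX-RS 2.0 client sketch follows. The view resource URL is hypothetical, authentication is omitted, and the responses are read as raw JSON strings rather than mapped objects.

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;

import org.apache.ambari.view.hive2.resources.jobs.viewJobs.JobInfo;

import java.util.Arrays;
import java.util.List;

public class JobServiceClientSketch {
  public static void main(String[] args) {
    // Hypothetical base URL of the Hive view's jobs resource.
    String jobsUrl = "http://ambari-host:8080/api/v1/views/HIVE/versions/2.0.0/instances/HIVE_1/resources/jobs";
    Client client = ClientBuilder.newClient();

    long endTime = System.currentTimeMillis();
    long startTime = endTime - 5L * 24 * 60 * 60 * 1000;

    // GET /jobs?startTime=...&endTime=...  -> jobs submitted inside the window
    String windowed = client.target(jobsUrl)
        .queryParam("startTime", startTime)
        .queryParam("endTime", endTime)
        .request(MediaType.APPLICATION_JSON)
        .header("X-Requested-By", "ambari")
        .get(String.class);

    // POST /jobs/getList with a JSON list of JobInfo  -> refreshed state of just those jobs
    List<JobInfo> stillRunning = Arrays.asList(
        new JobInfo("42", "hive_20160630_abc", null, null));  // hypothetical ids
    String refreshed = client.target(jobsUrl).path("getList")
        .request(MediaType.APPLICATION_JSON)
        .header("X-Requested-By", "ambari")
        .post(Entity.json(stillRunning), String.class);

    System.out.println(windowed);
    System.out.println(refreshed);
  }
}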

http://git-wip-us.apache.org/repos/asf/ambari/blob/ebaed6ad/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/atsJobs/ATSParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/atsJobs/ATSParser.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/atsJobs/ATSParser.java
index e465276..0e19e0e 100644
--- a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/atsJobs/ATSParser.java
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/atsJobs/ATSParser.java
@@ -32,7 +32,7 @@ import java.util.List;
  */
 public class ATSParser implements IATSParser {
   protected final static Logger LOG =
-      LoggerFactory.getLogger(ATSParser.class);
+    LoggerFactory.getLogger(ATSParser.class);
 
   private ATSRequestsDelegate delegate;
 
@@ -42,13 +42,36 @@ public class ATSParser implements IATSParser {
     this.delegate = delegate;
   }
 
+  /**
+   * Returns all HiveQueryIds from ATS for the given user.
+   * @param username: user whose hive query ids are fetched
+   * @return: list of HiveQueryIds
+   */
   @Override
-  public List<HiveQueryId> getHiveQueryIdsList(String username) {
-    JSONObject entities = delegate.hiveQueryIdList(username);
+  public List<HiveQueryId> getHiveQueryIdsForUser(String username) {
+    JSONObject entities = delegate.hiveQueryIdsForUser(username);
+    return parseHqidJsonFromATS(entities);
+  }
+
+  /**
+   * Parses the JSONArray of hive query ids.
+   * @param entities: should contain an 'entities' element that is a JSONArray
+   * @return: list of parsed HiveQueryIds
+   */
+  private List<HiveQueryId> parseHqidJsonFromATS(JSONObject entities) {
     JSONArray jobs = (JSONArray) entities.get("entities");
 
-    List<HiveQueryId> parsedJobs = new LinkedList<HiveQueryId>();
-    for(Object job : jobs) {
+    return getHqidListFromJsonArray(jobs);
+  }
+
+  /**
+   * parses List of HiveQueryIds from JSON
+   * @param jobs
+   * @return
+   */
+  private List<HiveQueryId> getHqidListFromJsonArray(JSONArray jobs) {
+    List<HiveQueryId> parsedJobs = new LinkedList<>();
+    for (Object job : jobs) {
       try {
         HiveQueryId parsedJob = parseAtsHiveJob((JSONObject) job);
         parsedJobs.add(parsedJob);
@@ -81,9 +104,12 @@ public class ATSParser implements IATSParser {
   @Override
   public HiveQueryId getHiveQueryIdByOperationId(String guidString) {
     JSONObject entities = delegate.hiveQueryIdByOperationId(guidString);
+    return getHiveQueryIdFromJson(entities);
+  }
+
+  private HiveQueryId getHiveQueryIdFromJson(JSONObject entities) {
     JSONArray jobs = (JSONArray) entities.get("entities");
 
-    assert jobs.size() <= 1;
     if (jobs.size() == 0) {
       return new HiveQueryId();
     }
@@ -91,6 +117,18 @@ public class ATSParser implements IATSParser {
     return parseAtsHiveJob((JSONObject) jobs.get(0));
   }
 
+  /**
+   * Returns the hive query entity from ATS; an empty object if not found.
+   *
+   * @param hiveId: the entity id of the hive query
+   * @return: the HiveQueryId, or an empty entity if not found
+   */
+  @Override
+  public HiveQueryId getHiveQueryIdByHiveEntityId(String hiveId) {
+    JSONObject entity = delegate.hiveQueryEntityByEntityId(hiveId);
+    return parseAtsHiveJob(entity);
+  }
+
   @Override
   public TezDagId getTezDAGByName(String name) {
     JSONArray tezDagEntities = (JSONArray) delegate.tezDagByName(name).get("entities");
@@ -103,6 +141,32 @@ public class ATSParser implements IATSParser {
     return parseTezDag(tezDagEntities);
   }
 
+  /**
+   * Fetches the HIVE_QUERY_ID entities from ATS for the given user within the given time window.
+   *
+   * @param username:  username for which to fetch hive query IDs
+   * @param startTime: time in milliseconds, inclusive
+   * @param endTime:   time in milliseconds, exclusive
+   * @return: list of HiveQueryIds
+   */
+  @Override
+  public List<HiveQueryId> getHiveQueryIdsForUserByTime(String username, long startTime, long endTime) {
+    JSONObject entities = delegate.hiveQueryIdsForUserByTime(username, startTime, endTime);
+    return parseHqidJsonFromATS(entities);
+  }
+
+  @Override
+  public List<HiveQueryId> getHiveQueryIdByEntityList(List<String> hiveIds) {
+    List<HiveQueryId> hiveQueryIds = new LinkedList<>();
+    for (String id : hiveIds) {
+      HiveQueryId hqi = this.getHiveQueryIdByHiveEntityId(id);
+      if (null != hqi.entity) {
+        hiveQueryIds.add(hqi);
+      }
+    }
+    return hiveQueryIds;
+  }
+
   private TezDagId parseTezDag(JSONArray tezDagEntities) {
     assert tezDagEntities.size() <= 1;
     if (tezDagEntities.size() == 0) {
@@ -123,7 +187,7 @@ public class ATSParser implements IATSParser {
 
     parsedJob.entity = (String) job.get("entity");
     parsedJob.url = delegate.hiveQueryIdDirectUrl((String) job.get("entity"));
-    parsedJob.starttime = ((Long) job.get("starttime")) / MillisInSecond;
+    parsedJob.starttime = ((Long) job.get("starttime"));
 
     JSONObject primaryfilters = (JSONObject) job.get("primaryfilters");
     JSONArray operationIds = (JSONArray) primaryfilters.get("operationid");
@@ -136,9 +200,9 @@ public class ATSParser implements IATSParser {
     }
 
     JSONObject lastEvent = getLastEvent(job);
-    long lastEventTimestamp = ((Long) lastEvent.get("timestamp")) / MillisInSecond;
+    long lastEventTimestamp = ((Long) lastEvent.get("timestamp"));
 
-    parsedJob.duration = lastEventTimestamp - parsedJob.starttime;
+    parsedJob.duration = (lastEventTimestamp - parsedJob.starttime) / MillisInSecond;
 
     JSONObject otherinfo = (JSONObject) job.get("otherinfo");
     if (otherinfo.get("QUERY") != null) {  // workaround for HIVE-10829

http://git-wip-us.apache.org/repos/asf/ambari/blob/ebaed6ad/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/atsJobs/ATSRequestsDelegate.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/atsJobs/ATSRequestsDelegate.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/atsJobs/ATSRequestsDelegate.java
index ac8cd22..8f7aa61 100644
--- a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/atsJobs/ATSRequestsDelegate.java
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/atsJobs/ATSRequestsDelegate.java
@@ -31,7 +31,7 @@ public interface ATSRequestsDelegate {
 
   String tezVerticesListForDAGUrl(String dagId);
 
-  JSONObject hiveQueryIdList(String username);
+  JSONObject hiveQueryIdsForUser(String username);
 
   JSONObject hiveQueryIdByOperationId(String operationId);
 
@@ -40,4 +40,8 @@ public interface ATSRequestsDelegate {
   JSONObject tezVerticesListForDAG(String dagId);
 
   JSONObject tezDagByEntity(String entity);
+
+  JSONObject hiveQueryIdsForUserByTime(String username, long startTime, long endTime);
+
+  JSONObject hiveQueryEntityByEntityId(String hiveEntityId);
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/ebaed6ad/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/atsJobs/ATSRequestsDelegateImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/atsJobs/ATSRequestsDelegateImpl.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/atsJobs/ATSRequestsDelegateImpl.java
index 67497fd..3fd4f6b 100644
--- a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/atsJobs/ATSRequestsDelegateImpl.java
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/atsJobs/ATSRequestsDelegateImpl.java
@@ -31,7 +31,7 @@ import java.util.HashMap;
 
 public class ATSRequestsDelegateImpl implements ATSRequestsDelegate {
   protected final static Logger LOG =
-      LoggerFactory.getLogger(ATSRequestsDelegateImpl.class);
+    LoggerFactory.getLogger(ATSRequestsDelegateImpl.class);
   public static final String EMPTY_ENTITIES_JSON = "{ \"entities\" : [  ] }";
 
   private ViewContext context;
@@ -76,7 +76,7 @@ public class ATSRequestsDelegateImpl implements ATSRequestsDelegate {
   }
 
   @Override
-  public JSONObject hiveQueryIdList(String username) {
+  public JSONObject hiveQueryIdsForUser(String username) {
     String hiveQueriesListUrl = atsUrl + "/ws/v1/timeline/HIVE_QUERY_ID?primaryFilter=requestuser:" + username;
     String response = readFromWithDefault(hiveQueriesListUrl, "{ \"entities\" : [  ] }");
     return (JSONObject) JSONValue.parse(response);
@@ -85,7 +85,7 @@ public class ATSRequestsDelegateImpl implements ATSRequestsDelegate {
   @Override
   public JSONObject hiveQueryIdByOperationId(String operationId) {
     String hiveQueriesListUrl = hiveQueryIdOperationIdUrl(operationId);
-    String response = readFromWithDefault(hiveQueriesListUrl, "{ \"entities\" : [  ] }");
+    String response = readFromWithDefault(hiveQueriesListUrl, EMPTY_ENTITIES_JSON);
     return (JSONObject) JSONValue.parse(response);
   }
 
@@ -103,6 +103,35 @@ public class ATSRequestsDelegateImpl implements ATSRequestsDelegate {
     return (JSONObject) JSONValue.parse(response);
   }
 
+  /**
+   * Fetches the HIVE_QUERY_ID entities from ATS for the given user within the given time window.
+   * @param username: username for which to fetch hive query IDs
+   * @param startTime: time in milliseconds, inclusive
+   * @param endTime: time in milliseconds, exclusive
+   * @return: JSON response containing the matching entities
+   */
+  @Override
+  public JSONObject hiveQueryIdsForUserByTime(String username, long startTime, long endTime) {
+    StringBuilder url = new StringBuilder();
+    url.append(atsUrl).append("/ws/v1/timeline/HIVE_QUERY_ID?")
+      .append("windowStart=").append(startTime)
+      .append("&windowEnd=").append(endTime)
+      .append("&primaryFilter=requestuser:").append(username);
+    String hiveQueriesListUrl = url.toString();
+
+    String response = readFromWithDefault(hiveQueriesListUrl, EMPTY_ENTITIES_JSON);
+    return (JSONObject) JSONValue.parse(response);
+  }
+
+  @Override
+  public JSONObject hiveQueryEntityByEntityId(String hiveEntityId) {
+    StringBuilder url = new StringBuilder();
+    url.append(atsUrl).append("/ws/v1/timeline/HIVE_QUERY_ID/").append(hiveEntityId);
+    String hiveQueriesListUrl = url.toString();
+    String response = readFromWithDefault(hiveQueriesListUrl, EMPTY_ENTITIES_JSON);
+    return (JSONObject) JSONValue.parse(response);
+  }
+
   private String tezDagEntityUrl(String entity) {
     return atsUrl + "/ws/v1/timeline/TEZ_DAG_ID?primaryFilter=callerId:" + entity;
   }
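
The two new delegate methods reduce to two ATS timeline URLs; the throwaway sketch below just prints their shape (host, user, times, and entity id are made up):

public class AtsUrlSketch {
  public static void main(String[] args) {
    String atsUrl = "http://ats-host:8188";
    String username = "admin";
    long startTime = 1467244800000L;  // windowStart, inclusive, milliseconds
    long endTime = 1467331199999L;    // windowEnd, exclusive, milliseconds

    // hiveQueryIdsForUserByTime(username, startTime, endTime)
    String windowed = atsUrl + "/ws/v1/timeline/HIVE_QUERY_ID?"
        + "windowStart=" + startTime
        + "&windowEnd=" + endTime
        + "&primaryFilter=requestuser:" + username;

    // hiveQueryEntityByEntityId(hiveEntityId)
    String byEntity = atsUrl + "/ws/v1/timeline/HIVE_QUERY_ID/" + "hive_20160630_abc";

    System.out.println(windowed);
    System.out.println(byEntity);
  }
}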

http://git-wip-us.apache.org/repos/asf/ambari/blob/ebaed6ad/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/atsJobs/IATSParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/atsJobs/IATSParser.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/atsJobs/IATSParser.java
index e545c50..7c026a7 100644
--- a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/atsJobs/IATSParser.java
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/atsJobs/IATSParser.java
@@ -21,7 +21,7 @@ package org.apache.ambari.view.hive2.resources.jobs.atsJobs;
 import java.util.List;
 
 public interface IATSParser {
-  List<HiveQueryId> getHiveQueryIdsList(String username);
+  List<HiveQueryId> getHiveQueryIdsForUser(String username);
 
   List<TezVertexId> getVerticesForDAGId(String dagId);
 
@@ -30,4 +30,10 @@ public interface IATSParser {
   TezDagId getTezDAGByName(String name);
 
   TezDagId getTezDAGByEntity(String entity);
+
+  List<HiveQueryId> getHiveQueryIdsForUserByTime(String username, long startTime, long endTime);
+
+  HiveQueryId getHiveQueryIdByHiveEntityId(String hiveEntityId);
+
+  List<HiveQueryId> getHiveQueryIdByEntityList(List<String> hiveEntityIds);
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/ebaed6ad/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/viewJobs/Job.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/viewJobs/Job.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/viewJobs/Job.java
index 816e77a..d473bb6 100644
--- a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/viewJobs/Job.java
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/viewJobs/Job.java
@@ -125,7 +125,7 @@ public interface Job extends Serializable,Indexed,PersonalResource {
 
   void setGuid(String guid);
 
-  String getErrorFile();
+  String getHiveQueryId();
 
-  void setErrorFile(String errorFile);
+  void setHiveQueryId(String hiveQueryId);
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/ebaed6ad/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/viewJobs/JobControllerImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/viewJobs/JobControllerImpl.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/viewJobs/JobControllerImpl.java
index e94d727..66b8334 100644
--- a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/viewJobs/JobControllerImpl.java
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/viewJobs/JobControllerImpl.java
@@ -187,17 +187,16 @@ public class JobControllerImpl implements JobController, ModifyNotificationDeleg
 
     private static final long MillisInSecond = 1000L;
 
-    public void updateJobDuration() {
-        job.setDuration(System.currentTimeMillis() / MillisInSecond - job.getDateSubmitted());
-    }
-
-    public void setCreationDate() {
-        job.setDateSubmitted(System.currentTimeMillis() / MillisInSecond);
-    }
+  public void updateJobDuration() {
+    job.setDuration((System.currentTimeMillis() / MillisInSecond) - (job.getDateSubmitted() / MillisInSecond));
+  }
 
+  public void setCreationDate() {
+    job.setDateSubmitted(System.currentTimeMillis());
+  }
 
-    private void setupLogFile() {
-        LOG.debug("Creating log file for job#" + job.getId());
+  private void setupLogFile() {
+    LOG.debug("Creating log file for job#" + job.getId());
 
         String logFile = job.getStatusDir() + "/" + "logs";
         try {

http://git-wip-us.apache.org/repos/asf/ambari/blob/ebaed6ad/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/viewJobs/JobImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/viewJobs/JobImpl.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/viewJobs/JobImpl.java
index b71e2f7..17d585f 100644
--- a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/viewJobs/JobImpl.java
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/viewJobs/JobImpl.java
@@ -54,10 +54,11 @@ public class JobImpl implements Job {
 
   private String logFile;
   private String confFile;
-  private String errorFile;
 
   private String guid = null;
 
+  private String hiveQueryId;
+
   public JobImpl() {}
   public JobImpl(Map<String, Object> stringObjectMap) throws InvocationTargetException, IllegalAccessException {
     for (Map.Entry<String, Object> entry : stringObjectMap.entrySet())  {
@@ -76,9 +77,8 @@ public class JobImpl implements Job {
 
     JobImpl job = (JobImpl) o;
 
-    if (id != null ? !id.equals(job.id) : job.id != null) return false;
+    return id != null ? id.equals(job.id) : job.id == null;
 
-    return true;
   }
 
   @Override
@@ -87,6 +87,18 @@ public class JobImpl implements Job {
   }
 
   @Override
+  @Transient
+  public String getHiveQueryId() {
+    return hiveQueryId;
+  }
+
+  @Override
+  @Transient
+  public void setHiveQueryId(String hiveQueryId) {
+    this.hiveQueryId = hiveQueryId;
+  }
+
+  @Override
   public String getId() {
     return id;
   }
@@ -309,14 +321,4 @@ public class JobImpl implements Job {
   public void setGuid(String guid) {
     this.guid = guid;
   }
-
-  @Override
-  public String getErrorFile() {
-    return errorFile;
-  }
-
-  @Override
-  public void setErrorFile(String errorFile) {
-    this.errorFile = errorFile;
-  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/ebaed6ad/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/viewJobs/JobInfo.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/viewJobs/JobInfo.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/viewJobs/JobInfo.java
new file mode 100644
index 0000000..1565140
--- /dev/null
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/viewJobs/JobInfo.java
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.jobs.viewJobs;
+
+public class JobInfo {
+  private String jobId;
+  private String hiveId;
+  private String dagId;
+  private String operationId;
+
+  public JobInfo() {
+  }
+
+  public JobInfo(String jobId, String hiveId, String dagId, String operationId) {
+    this.jobId = jobId;
+    this.hiveId = hiveId;
+    this.dagId = dagId;
+    this.operationId = operationId;
+  }
+
+  public String getJobId() {
+    return jobId;
+  }
+
+  public void setJobId(String jobId) {
+    this.jobId = jobId;
+  }
+
+  public String getHiveId() {
+    return hiveId;
+  }
+
+  public void setHiveId(String hiveId) {
+    this.hiveId = hiveId;
+  }
+
+  public String getDagId() {
+    return dagId;
+  }
+
+  public void setDagId(String dagId) {
+    this.dagId = dagId;
+  }
+
+  public String getOperationId() {
+    return operationId;
+  }
+
+  public void setOperationId(String operationId) {
+    this.operationId = operationId;
+  }
+
+  @Override
+  public String toString() {
+    return new StringBuilder().append("JobInfo{" )
+      .append("jobId=").append(jobId)
+      .append(", hiveId=").append(hiveId)
+      .append(", dagId=").append(dagId)
+      .append(", operationId=").append(operationId)
+      .append('}').toString();
+  }
+}
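
JobInfo is the request-body element the history UI posts to /jobs/getList. A hedged sketch of what one element serializes to; Jackson is used here purely for illustration (the view's JSON provider does the real mapping) and the ids are invented:

import com.fasterxml.jackson.databind.ObjectMapper;

import org.apache.ambari.view.hive2.resources.jobs.viewJobs.JobInfo;

public class JobInfoJsonSketch {
  public static void main(String[] args) throws Exception {
    JobInfo info = new JobInfo("42", "hive_20160630_abc", "dag_1467280000000_0001_1", "b64opid");
    // Roughly: {"jobId":"42","hiveId":"hive_20160630_abc","dagId":"dag_1467280000000_0001_1","operationId":"b64opid"}
    // (field order may vary)
    System.out.println(new ObjectMapper().writeValueAsString(info));
  }
}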

http://git-wip-us.apache.org/repos/asf/ambari/blob/ebaed6ad/contrib/views/hive-next/src/main/resources/ui/hive-web/app/components/number-range-widget.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/components/number-range-widget.js b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/components/number-range-widget.js
index 5d62b59..3b340ad 100644
--- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/components/number-range-widget.js
+++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/components/number-range-widget.js
@@ -54,17 +54,26 @@ export default Ember.Component.extend({
     numberRange.set('fromDuration', utils.secondsToHHMMSS(numberRange.get('from')));
     numberRange.set('toDuration', utils.secondsToHHMMSS(numberRange.get('to')));
   },
-  updateMin: function () {
+  updateFrom: function () {
     if (this.get('rendered')) {
       this.$('.slider').slider('values', 0, this.get('numberRange.from'));
       this.updateRangeLables();
     }
   }.observes('numberRange.from'),
 
-  updateMax: function () {
+  updateTo: function () {
     if (this.get('rendered')) {
       this.$('.slider').slider('values', 1, this.get('numberRange.to'));
       this.updateRangeLables();
     }
-  }.observes('numberRange.to')
+  }.observes('numberRange.to'),
+
+  updateMin: function(){
+    this.$( ".slider" ).slider( "option", "min", this.get('numberRange.min') );
+  }.observes('numberRange.min'),
+
+  updateMax: function(){
+    this.$( ".slider" ).slider( "option", "max", this.get('numberRange.max') );
+  }.observes('numberRange.max')
+
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/ebaed6ad/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/history.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/history.js b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/history.js
index 8c4ed2f..ca6233c 100644
--- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/history.js
+++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/history.js
@@ -23,50 +23,87 @@ import constants from 'hive/utils/constants';
 export default Ember.ArrayController.extend(FilterableMixin, {
   jobService: Ember.inject.service('job'),
   fileService: Ember.inject.service('file'),
-
+  historyService: Ember.inject.service('history'),
+  NUM_OF_DAYS: 5,
+  REFRESH_INTERVAL_MS: 30000,
   sortAscending: false,
   sortProperties: ['dateSubmittedTimestamp'],
 
+  refresher: function () {
+    var self = this;
+    Ember.run.later(function () {
+      if (self.get('isShowing')) {
+        self.refresh();
+      }
+      self.refresher();
+    }, self.get('REFRESH_INTERVAL_MS'));
+  },
+  onLoadRoute: function () {
+    this.set('isShowing', true);
+  },
+  onUnloadRoute: function () {
+    this.set('isShowing', false);
+  },
   init: function () {
-    var oneMonthAgo = new Date();
-    oneMonthAgo.setMonth(oneMonthAgo.getMonth() - 1);
-
     this._super();
+    var self = this;
+    var fromTime = moment().subtract(this.get('NUM_OF_DAYS'), 'days').startOf('day');
+    var time = moment();
+    var toTime = moment({
+      years: time.year(),
+      months: time.month(),
+      date: time.date(),
+      hours: 23,
+      minutes: 59,
+      seconds: 59,
+      milliseconds: 999
+    }); // end of the current day (23:59:59.999)
 
-    this.set('columns', Ember.ArrayProxy.create({ content: Ember.A([
-      Ember.Object.create({
-        caption: 'columns.title',
-        property: 'title',
-        link: constants.namingConventions.subroutes.historyQuery
-      }),
-      Ember.Object.create({
-        caption: 'columns.status',
-        property: 'status'
-      }),
-      Ember.Object.create({
-        caption: 'columns.date',
-        property: 'dateSubmittedTimestamp',
-        dateRange: Ember.Object.create({
-          min: oneMonthAgo,
-          max: new Date()
+    this.set('columns', Ember.ArrayProxy.create({
+      content: Ember.A([
+        Ember.Object.create({
+          caption: 'columns.title',
+          property: 'title',
+          link: constants.namingConventions.subroutes.historyQuery
+        }),
+        Ember.Object.create({
+          caption: 'columns.status',
+          property: 'status'
+        }),
+        Ember.Object.create({
+          caption: 'columns.date',
+          property: 'dateSubmittedTimestamp',
+          dateRange: Ember.Object.create({
+            min: fromTime.toDate(),
+            max: toTime.toDate()
+          })
+        }),
+        Ember.Object.create({
+          caption: 'columns.duration',
+          property: 'duration',
+          numberRange: Ember.Object.create({
+            min: 0,
+            max: 10,
+            units: 'sec'
+          })
         })
-      }),
-      Ember.Object.create({
-        caption: 'columns.duration',
-        property: 'duration',
-        numberRange: Ember.Object.create({
-          min: 0,
-          max: 10,
-          units: 'sec'
-        })
-      })
-    ])}));
-  },
-
-  model: function () {
-    return this.filter(this.get('history'));
-  }.property('history', 'filters.@each'),
+      ])
+    }));
 
+    return this.updateJobs(fromTime, toTime).then(function (data) {
+      self.applyDurationFilter();
+      self.refresher();
+    });
+  },
+  applyDurationFilter: function () {
+    var self = this;
+    var durationColumn = this.get('columns').find(function (column) {
+      return column.get('caption') === 'columns.duration';
+    });
+    var from = durationColumn.get('numberRange.from');
+    var to = durationColumn.get('numberRange.to');
+    self.filterBy("duration", {min: from, max: to});
+  },
   updateIntervals: function () {
     var durationColumn;
     var maxDuration;
@@ -86,51 +123,101 @@ export default Ember.ArrayController.extend(FilterableMixin, {
 
       durationColumn.set('numberRange.min', minDuration);
       durationColumn.set('numberRange.max', maxDuration);
+      var from = durationColumn.get('numberRange.from');
+      var to = durationColumn.get('numberRange.to');
+      if (from > maxDuration) {
+        durationColumn.set("numberRange.from", maxDuration);
+      }
+      if (to < minDuration) {
+        durationColumn.set("numberRange.to", minDuration);
+      }
     }
   }.observes('history'),
 
-  updateDateRange: function () {
-    var dateColumn;
-    var maxDate;
-    var minDate;
-
-    if (this.get('columns')) {
-      dateColumn = this.get('columns').find(function (column) {
-        return column.get('caption') === 'columns.date';
-      });
-
-      var items = this.get('history').map(function (item) {
-        return item.get(dateColumn.get('property'));
-      });
-
-      minDate = items.length ? Math.min.apply(Math, items) : new Date();
-      maxDate = items.length ? Math.max.apply(Math, items) : new Date();
+  model: function () {
+    return this.filter(this.get('history'));
+  }.property('history', 'filters.@each'),
 
-      dateColumn.set('dateRange.min', minDate);
-      dateColumn.set('dateRange.max', maxDate);
-    }
-  }.observes('history'),
+  updateJobs: function (fromDate, toDate) {
+    var self = this;
+    var fromTime = moment(fromDate).startOf('day').toDate().getTime();
+    var time = moment(toDate);
+    var toTime = moment({
+      years: time.year(),
+      months: time.month(),
+      date: time.date(),
+      hours: 23,
+      minutes: 59,
+      seconds: 59,
+      milliseconds: 999
+    }).toDate().getTime(); // end of the chosen day (23:59:59.999)
+    this.set("fromTime", fromTime);
+    this.set("toTime", toTime);
+    return this.get("historyService").getJobs(fromTime, toTime).then(function (data) {
+      self.set('history', data);
+    });
+  },
 
   filterBy: function (filterProperty, filterValue, exactMatch) {
     var column = this.get('columns').find(function (column) {
       return column.get('property') === filterProperty;
     });
 
+    var isDateColumn = column.get('caption') === 'columns.date';
+
     if (column) {
       column.set('filterValue', filterValue, exactMatch);
+      if (isDateColumn) {
+
+        return this.updateJobs(filterValue.min, filterValue.max);
+      } else {
+        this.updateFilters(filterProperty, filterValue, exactMatch);
+      }
     } else {
       this.updateFilters(filterProperty, filterValue, exactMatch);
     }
   },
 
+  refresh: function () {
+    var self = this;
+    this.get('historyService').getUpdatedJobList(this.get('toTime')).then(function (data) {
+      self.set('history', data);
+    });
+  },
+
   actions: {
+
+    refreshJobs: function () {
+      this.refresh();
+    },
+
+    filterUpdated: function (filterProperty, filterValue) {
+      var self = this;
+      var column = this.get('columns').find(function (column) {
+        return column.get('property') === filterProperty;
+      });
+
+      var isDateColumn = (column.get('caption') === 'columns.date');
+
+      if (column) {
+        column.set('filterValue', filterValue);
+        if (isDateColumn) {
+          return this.updateJobs(filterValue.min, filterValue.max).then(function (data) {
+            self.updateFilters(filterProperty, filterValue);
+          });
+        } else {
+          self.updateFilters(filterProperty, filterValue);
+        }
+      }
+    },
+
     sort: function (property) {
       //if same column has been selected, toggle flag, else default it to true
       if (this.get('sortProperties').objectAt(0) === property) {
         this.set('sortAscending', !this.get('sortAscending'));
       } else {
         this.set('sortAscending', true);
-        this.set('sortProperties', [ property ]);
+        this.set('sortProperties', [property]);
       }
     },
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/ebaed6ad/contrib/views/hive-next/src/main/resources/ui/hive-web/app/initializers/i18n.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/initializers/i18n.js b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/initializers/i18n.js
index 056db29..578101f 100644
--- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/initializers/i18n.js
+++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/initializers/i18n.js
@@ -197,6 +197,7 @@ TRANSLATIONS = {
     stoppingJob: 'Stopping...',
     close: 'Close',
     clearFilters: 'Clear filters',
+    refresh: 'Refresh',
     expand: 'Expand message',
     collapse: 'Collapse message',
     previousPage: 'previous',
@@ -214,7 +215,7 @@ TRANSLATIONS = {
     noTablesMatch: 'No tables match',
     noColumnsMatch: 'No columns match',
     table: 'Table ',
-    hoursShort: "{{hours}} hrs",
+    hrsShort: "{{hours}} hrs",
     minsShort: "{{minutes}} mins",
     secsShort: "{{seconds}} secs"
   },

http://git-wip-us.apache.org/repos/asf/ambari/blob/ebaed6ad/contrib/views/hive-next/src/main/resources/ui/hive-web/app/models/job.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/models/job.js b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/models/job.js
index 9079b5a..185f512 100644
--- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/models/job.js
+++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/models/job.js
@@ -21,6 +21,7 @@ import DS from 'ember-data';
 export default DS.Model.extend({
   title: DS.attr('string'),
   queryId: DS.attr(),
+  hiveQueryId: DS.attr('string'),
   queryFile: DS.attr('string'),
   owner: DS.attr('string'),
   dataBase: DS.attr('string'),
@@ -43,7 +44,7 @@ export default DS.Model.extend({
   dateSubmittedTimestamp: function () {
     var date = this.get('dateSubmitted');
 
-    return date ? date * 1000 : date;
+    return date; // dateSubmitted is already in milliseconds, so no conversion is required.
   }.property('dateSubmitted'),
 
   uppercaseStatus: function () {

http://git-wip-us.apache.org/repos/asf/ambari/blob/ebaed6ad/contrib/views/hive-next/src/main/resources/ui/hive-web/app/routes/history.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/routes/history.js b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/routes/history.js
index 0aa3d41..e9fcf88 100644
--- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/routes/history.js
+++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/routes/history.js
@@ -17,23 +17,13 @@
  */
 
 import Ember from 'ember';
-import constants from 'hive/utils/constants';
 
 export default Ember.Route.extend({
-  notifyService: Ember.inject.service(constants.namingConventions.notify),
-
-  model: function () {
-    var self = this;
-
-    return this.store.find(constants.namingConventions.job).catch(function (error) {
-      self.get('notifyService').error(error);
-    });
+  deactivate: function () {
+    this.controller.onUnloadRoute();
   },
 
   setupController: function (controller, model) {
-    if (!model) {
-      return;
-    }
-    controller.set('history', model);
+    this.controller.onLoadRoute();
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/ebaed6ad/contrib/views/hive-next/src/main/resources/ui/hive-web/app/services/history.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/services/history.js b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/services/history.js
new file mode 100644
index 0000000..4998d19
--- /dev/null
+++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/services/history.js
@@ -0,0 +1,204 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import Job from 'hive/models/job';
+import constants from 'hive/utils/constants';
+
+export default Ember.Service.extend({
+  historyJobsMap: {},
+  store: Ember.inject.service(),
+  fromDate: null,
+  toDate: null,
+
+  getJobs: function (fromDate, toDate) {
+    var self = this;
+    console.log("getJobs : fromDate : ", fromDate, ", toDate : ", toDate);
+
+    if (Ember.isEmpty(fromDate) || Ember.isEmpty(toDate)) {
+      throw new Error("Dates cannot be empty.");
+    }
+    if (toDate < fromDate) {
+      throw new Error("toDate cannot be smaller than fromDate");
+    }
+
+    var currFromDate = this.get("fromDate");
+    var currToDate = this.get("toDate");
+    var currJobsMap = this.get("historyJobsMap");
+
+    if (!Ember.isEmpty(currFromDate) && !Ember.isEmpty(currToDate)
+      && currFromDate <= fromDate && currToDate >= toDate
+      && !Ember.isEmpty(currJobsMap)
+    ) {
+      // filter current jobs and return
+      var validJobs = [];
+      Object.keys(currJobsMap).forEach(function (id) {
+        var job = currJobsMap[id];
+        if (job.get('dateSubmitted') >= fromDate && job.get('dateSubmitted') < toDate) {
+          validJobs.push(job);
+        }
+      });
+
+      return Ember.RSVP.Promise.resolve(validJobs);
+    }
+
+    return this.fetchJobs(fromDate, toDate).then(function (data) {
+      var jobMap = {};
+      var jobs = data.map(function (j) {
+        var job = this.get('store').push('job', j);
+        jobMap[job.id] = job;
+        return job;
+      }, self);
+      self.set('fromDate', fromDate);
+      self.set('toDate', toDate);
+      self.set('historyJobsMap', jobMap);
+      return jobs;
+    });
+  },
+
+  fetchJobs: function (fromDate, toDate) {
+    console.log("getJobs : fromDate : ", fromDate, ", toDate : ", toDate);
+
+    if (Ember.isEmpty(fromDate) || Ember.isEmpty(toDate)) {
+      throw new Error("Dates cannot be empty.");
+    }
+    if (toDate < fromDate) {
+      throw new Error("toDate cannot be smaller than fromDate");
+    }
+
+    var self = this;
+    var url = this.container.lookup('adapter:application').buildURL();
+    url += "/jobs";
+    var jobMap = {};
+    return Ember.$.ajax({
+      url: url,
+      type: 'GET',
+      data: {
+        "startTime": fromDate,
+        "endTime": toDate
+      },
+      headers: {
+        'X-Requested-By': 'ambari'
+      }
+    });
+  },
+
+  fetchAndMergeNew: function (toTime) {
+    var self = this;
+    return this.fetchNew(toTime).then(function (data) {
+      var jobMap = self.get('historyJobsMap');
+      var jobs = data.map(function (j) {
+        var job = this.get('store').push('job', j);
+        jobMap[job.id] = job;
+        return job;
+      }, self);
+      self.set('toDate', toTime);
+      return jobs;
+    });
+  },
+
+  getUpdatedJobList: function (toTime) {
+    var self = this;
+    return this.refreshAndFetchNew(toTime).then(function (data) {
+      var jobMap = self.get('historyJobsMap');
+      var allJobs = Object.keys(jobMap).map(function (id) {
+        return jobMap[id];
+      });
+      return allJobs;
+    });
+  },
+
+  fetchNew: function (toTime) {
+    var self = this;
+    var jobMap = this.get('historyJobsMap');
+    var fromTime = 0;
+    if (this.get('fromDate')) {
+      fromTime = this.get('fromDate');
+    }
+
+    Object.keys(jobMap).forEach(function (id) {
+      var job = jobMap[id];
+      fromTime = Math.max(fromTime, job.get('dateSubmitted'));
+    });
+
+    if (fromTime > toTime) {
+      // we already have latest data.
+      return Ember.RSVP.Promise.resolve([]);
+    }
+    return this.fetchJobs(fromTime, toTime);
+  },
+
+  refresh: function () {
+    var self = this;
+    var url = this.container.lookup('adapter:application').buildURL();
+    url += "/jobs/getList";
+    var jobMap = this.get('historyJobsMap');
+    var statuses = constants.statuses;
+    var jobIds = [];
+    Object.keys(jobMap).forEach(function (id) {
+      var job = jobMap[id];
+      var jobStatus = job.get('uppercaseStatus');
+      if (jobStatus === statuses.initialized
+        || jobStatus === statuses.pending
+        || jobStatus === statuses.running
+        || jobStatus === statuses.unknown
+      ) {
+        // note jobId will either have DB's id or hiveId
+        jobIds.push({
+          jobId: job.get('id'),
+          hiveId: job.get('hiveQueryId'),
+          dagId: job.get('dagId'),
+          operationId: job.get('operationId')
+        });
+      }
+    });
+
+    if (Ember.isEmpty(jobIds)) {
+      return Ember.RSVP.Promise.resolve([]);
+    }
+    console.log("refresh jobIds to refresh : ", jobIds);
+    return Ember.$.ajax({
+      url: url,
+      type: 'POST',
+      data: JSON.stringify(jobIds),
+      headers: {
+        'X-Requested-By': 'ambari'
+      },
+      contentType: "application/json"
+    }).then(function (data) {
+      var jobs = data.map(function (j) {
+        var job = this.get('store').push('job', j);
+        jobMap[job.id] = job;
+        return job;
+      }, self);
+      self.set('historyJobsMap', jobMap);
+      // return all the jobs
+      var allJobs = Object.keys(jobMap).map(function (id) {
+        return jobMap[id];
+      });
+      return allJobs;
+    });
+  },
+
+  refreshAndFetchNew: function (toTime) {
+    var self = this;
+    return this.refresh().then(function (data) {
+      return self.fetchAndMergeNew(toTime);
+    })
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/ebaed6ad/contrib/views/hive-next/src/main/resources/ui/hive-web/app/templates/history.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/templates/history.hbs b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/templates/history.hbs
index 052498e..7121b85 100644
--- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/templates/history.hbs
+++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/templates/history.hbs
@@ -15,49 +15,53 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 }}
-
 <table class="table table-expandable">
-  <thead>
+    <thead>
     <tr>
-      {{#each column in columns}}
+        {{#each column in columns}}
         <th>
-          {{#if column.caption}}
+            {{#if column.caption}}
             {{column-filter-widget class="pull-left"
-                                   column=column
-                                   filterValue=column.filterValue
-                                   sortAscending=controller.sortAscending
-                                   sortProperties=controller.sortProperties
-                                   columnSorted="sort"
-                                   columnFiltered="filter"}}
-          {{else}}
+            column=column
+            filterValue=column.filterValue
+            sortAscending=controller.sortAscending
+            sortProperties=controller.sortProperties
+            columnSorted="sort"
+            columnFiltered="filterUpdated"}}
+            {{else}}
             {{tb-helper "caption" column}}
-          {{/if}}
+            {{/if}}
+        </th>
+        {{/each}}
+        <th>
+            <button type="btn" class="btn btn-primary btn-sm icon-refresh" {{action
+            "refreshJobs"}}><i class="fa fa-refresh" aria-hidden="true"></i>
+            {{t "buttons.refresh"}}</button>
+
+            <button type="btn" class="btn btn-sm btn-warning pull-right clear-filters" {{action
+            "clearFilters"}}>{{t "buttons.clearFilters"}}</button>
         </th>
-      {{/each}}
-      <th>
-        <button type="btn" class="btn btn-sm btn-warning pull-right clear-filters" {{action "clearFilters"}}>{{t "buttons.clearFilters"}}</button>
-      </th>
     </tr>
-  </thead>
-  <tbody>
+    </thead>
+    <tbody>
     {{#if history.length}}
-      {{#if model.length}}
-        {{#each item in this}}
-          {{job-tr-view job=item onStopJob="interruptJob" onFileRequested="loadFile"}}
-        {{/each}}
-      {{else}}
-        <tr>
-          <td colspan="5">
+    {{#if model.length}}
+    {{#each item in this}}
+    {{job-tr-view job=item onStopJob="interruptJob" onFileRequested="loadFile"}}
+    {{/each}}
+    {{else}}
+    <tr>
+        <td colspan="5">
             <h4 class="empty-list">{{t "emptyList.history.noMatches"}}</h4>
-          </td>
-        </tr>
-      {{/if}}
+        </td>
+    </tr>
+    {{/if}}
     {{else}}
-      <tr>
+    <tr>
         <td colspan="5">
-          <h4 class="empty-list">{{t "emptyList.history.noItems"}}</h4>
+            <h4 class="empty-list">{{t "emptyList.history.noItems"}}</h4>
         </td>
-      </tr>
+    </tr>
     {{/if}}
-  </tbody>
+    </tbody>
 </table>

http://git-wip-us.apache.org/repos/asf/ambari/blob/ebaed6ad/contrib/views/hive-next/src/main/resources/ui/hive-web/app/utils/constants.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/utils/constants.js b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/utils/constants.js
index e40e447..bbe42cf 100644
--- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/utils/constants.js
+++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/utils/constants.js
@@ -61,13 +61,13 @@ export default Ember.Object.create({
     insertUdfs: 'insert-udfs',
     job: 'job',
     jobs: 'jobs',
-    history: 'history',
     savedQuery: 'saved-query',
     database: 'database',
     databases: 'databases',
     openQueries: 'open-queries',
     visualExplain: 'visual-explain',
     notify: 'notify',
+    history: 'history',
     tezUI: 'tez-ui',
     file: 'file',
     fileResource: 'file-resource',