Posted to commits@ambari.apache.org by al...@apache.org on 2015/03/12 20:31:26 UTC

[1/6] ambari git commit: AMBARI-10035. Hive View: Retrieve history from ATS (alexantonenko)

Repository: ambari
Updated Branches:
  refs/heads/trunk 751b31047 -> db9731272


http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/AggregatorTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/AggregatorTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/AggregatorTest.java
new file mode 100644
index 0000000..08e8b81
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/AggregatorTest.java
@@ -0,0 +1,396 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs;
+
+import org.apache.ambari.view.hive.persistence.utils.FilteringStrategy;
+import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive.resources.IResourceManager;
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.HiveQueryId;
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.IATSParser;
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.TezDagId;
+import org.apache.ambari.view.hive.resources.jobs.viewJobs.Job;
+import org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl;
+import org.apache.hive.service.cli.thrift.TOperationHandle;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.*;
+
+public class AggregatorTest {
+
+  public static final String SOME_QUERY = "some query";
+
+  @Test
+  public void testReadJobOutsideOfHS2() throws Exception {
+    HiveQueryId hiveQueryId = getSampleHiveQueryId("ENTITY-NAME");
+    ensureOperationIdUnset(hiveQueryId);
+
+    MockATSParser atsParser = getMockATSWithQueries(hiveQueryId);
+
+
+    Aggregator aggregator = new Aggregator(getEmptyJobResourceManager(),
+        getEmptyOperationHandleResourceManager(),
+        atsParser);
+
+    List<Job> aggregated = aggregator.readAll("luke");
+
+    Assert.assertEquals(1, aggregated.size());
+    Job job = aggregated.get(0);
+    Assert.assertEquals("ENTITY-NAME", job.getId());
+    Assert.assertEquals(SOME_QUERY, job.getTitle());
+  }
+
+  @Test
+  public void testReadJobWithHS2OutsideOfView() throws Exception {
+    HiveQueryId hiveQueryId = getSampleHiveQueryId("ENTITY-NAME");
+    ensureOperationIdSet(hiveQueryId);
+
+    MockATSParser atsParser = getMockATSWithQueries(hiveQueryId);
+    Aggregator aggregator = new Aggregator(getEmptyJobResourceManager(),
+        getEmptyOperationHandleResourceManager(),
+        atsParser);
+
+    List<Job> aggregated = aggregator.readAll("luke");
+
+    Assert.assertEquals(1, aggregated.size());
+    Job job = aggregated.get(0);
+    Assert.assertEquals("ENTITY-NAME", job.getId());
+    Assert.assertEquals(SOME_QUERY, job.getTitle());
+  }
+
+  @Test
+  public void testJobWithoutOperationIdShouldBeIgnored() throws Exception {
+    MockJobResourceManager jobResourceManager = getJobResourceManagerWithJobs(getSampleViewJob("1"));
+
+    Aggregator aggregator = new Aggregator(jobResourceManager,
+        getEmptyOperationHandleResourceManager(),
+        getEmptyATSParser());
+
+    List<Job> aggregated = aggregator.readAll("luke");
+
+    Assert.assertEquals(0, aggregated.size());
+  }
+
+  @Test
+  public void testReadJobOnlyInView() throws Exception {
+    MockJobResourceManager jobResourceManager = getJobResourceManagerWithJobs(getSampleViewJob("1"));
+
+    StoredOperationHandle operationHandle = getSampleOperationHandle("5", "1");
+    MockOperationHandleResourceManager operationHandleResourceManager = getOperationHandleRMWithEntities(operationHandle);
+
+    Aggregator aggregator = new Aggregator(jobResourceManager,
+        operationHandleResourceManager,
+        getEmptyATSParser());
+
+    List<Job> aggregated = aggregator.readAll("luke");
+
+    Assert.assertEquals(1, aggregated.size());
+    Job job = aggregated.get(0);
+    Assert.assertEquals("1", job.getId());
+  }
+
+  private MockOperationHandleResourceManager getOperationHandleRMWithEntities(StoredOperationHandle... operationHandles) {
+    MockOperationHandleResourceManager operationHandleResourceManager = getEmptyOperationHandleResourceManager();
+    HashMap<String, StoredOperationHandle> storage = new HashMap<String, StoredOperationHandle>();
+    for (StoredOperationHandle handle : operationHandles) {
+      storage.put(handle.getJobId(), handle);
+    }
+    operationHandleResourceManager.setStorage(storage);
+    return operationHandleResourceManager;
+  }
+
+  @Test
+  public void testReadJobBothATSAndView() throws Exception {
+    HiveQueryId hiveQueryId = getSampleHiveQueryId("ENTITY-NAME");
+    hiveQueryId.operationId = Aggregator.hexStringToUrlSafeBase64("1b2b");
+    MockATSParser atsParser = getMockATSWithQueries(hiveQueryId);
+
+    MockJobResourceManager jobResourceManager = getJobResourceManagerWithJobs(getSampleViewJob("1"));
+
+    StoredOperationHandle operationHandle = getSampleOperationHandle("5", "1");
+    operationHandle.setGuid("1b2b");
+    MockOperationHandleResourceManager operationHandleResourceManager = getOperationHandleRMWithEntities(operationHandle);
+
+    Aggregator aggregator = new Aggregator(jobResourceManager,
+        operationHandleResourceManager,
+        atsParser);
+
+    List<Job> aggregated = aggregator.readAll("luke");
+
+    Assert.assertEquals(1, aggregated.size());
+    Job job = aggregated.get(0);
+    Assert.assertEquals("1", job.getId());
+  }
+
+  @Test
+  public void testReadJobComplex() throws Exception {
+    //job both on ATS and View
+    HiveQueryId hiveQueryId1 = getSampleHiveQueryId("ENTITY-NAME");
+    hiveQueryId1.operationId = Aggregator.hexStringToUrlSafeBase64("1a1b");
+    Job job1 = getSampleViewJob("1");
+    StoredOperationHandle operationHandle1 = getSampleOperationHandle("5", "1");
+    operationHandle1.setGuid("1a1b");
+
+    //job only on ATS
+    HiveQueryId hiveQueryId2 = getSampleHiveQueryId("ENTITY-NAME2");
+    hiveQueryId2.operationId = Aggregator.hexStringToUrlSafeBase64("2a2a");
+
+    //job only in View
+    Job job3 = getSampleViewJob("3");
+    StoredOperationHandle operationHandle3 = getSampleOperationHandle("6", "3");
+    operationHandle3.setGuid("3c3d");
+
+
+    MockATSParser atsParser = getMockATSWithQueries(
+        hiveQueryId1, hiveQueryId2);
+    MockJobResourceManager jobResourceManager = getJobResourceManagerWithJobs(
+        job1, job3);
+    MockOperationHandleResourceManager operationHandleRM = getOperationHandleRMWithEntities(
+        operationHandle1, operationHandle3);
+
+    Aggregator aggregator = new Aggregator(jobResourceManager,
+        operationHandleRM,
+        atsParser);
+
+    List<Job> aggregated = aggregator.readAll("luke");
+
+    Assert.assertEquals(3, aggregated.size());
+  }
+
+  private MockJobResourceManager getJobResourceManagerWithJobs(Job... jobs) {
+    MockJobResourceManager jobResourceManager = getEmptyJobResourceManager();
+    jobResourceManager.setJobs(Arrays.asList(jobs));
+    return jobResourceManager;
+  }
+
+  private MockATSParser getEmptyATSParser() {
+    return new MockATSParser();
+  }
+
+  private void ensureOperationIdUnset(HiveQueryId hiveQueryId) {
+    hiveQueryId.operationId = null;
+  }
+
+  private void ensureOperationIdSet(HiveQueryId hiveQueryId) {
+    hiveQueryId.operationId = "operation-id";
+  }
+
+  private MockOperationHandleResourceManager getEmptyOperationHandleResourceManager() {
+    return new MockOperationHandleResourceManager();
+  }
+
+  private MockJobResourceManager getEmptyJobResourceManager() {
+    return new MockJobResourceManager();
+  }
+
+  private MockATSParser getMockATSWithQueries(HiveQueryId... hiveQueryIds) {
+    MockATSParser atsParser = getEmptyATSParser();
+    atsParser.setHiveQueryIds(Arrays.asList(hiveQueryIds));
+    return atsParser;
+  }
+
+  private JobImpl getSampleViewJob(String id) {
+    JobImpl job = new JobImpl();
+    job.setTitle("Test");
+    job.setId(id);
+    job.setOwner("luke");
+    return job;
+  }
+
+  private StoredOperationHandle getSampleOperationHandle(String id, String jobId) {
+    StoredOperationHandle opHandle = new StoredOperationHandle();
+    opHandle.setId(id);
+    opHandle.setJobId(jobId);
+    opHandle.setGuid("1b2b");
+    return opHandle;
+  }
+
+  private HiveQueryId getSampleHiveQueryId(String id) {
+    HiveQueryId hiveQueryId = new HiveQueryId();
+    hiveQueryId.entity = id;
+    hiveQueryId.query = SOME_QUERY;
+    hiveQueryId.user = "luke";
+    hiveQueryId.operationId = "fUjdt-VMRYuKRPCDTUr_rg";
+    hiveQueryId.dagNames = new LinkedList<String>();
+    return hiveQueryId;
+  }
+
+  @Test
+  public void testGetJobByOperationId() throws Exception {
+
+  }
+
+  @Test
+  public void testHexStringToUrlSafeBase64() throws Exception {
+    String urlSafe = Aggregator.hexStringToUrlSafeBase64("1a1b");
+    Assert.assertEquals("Ghs", urlSafe);
+  }
+
+  @Test
+  public void testUrlSafeBase64ToHexString() throws Exception {
+    String hex = Aggregator.urlSafeBase64ToHexString("Ghs");
+    Assert.assertEquals("1a1b", hex);
+  }
+
+  public static class MockJobResourceManager implements IResourceManager<Job> {
+
+    private List<Job> jobs = new LinkedList<Job>();
+
+    @Override
+    public Job create(Job object) {
+      return null;
+    }
+
+    @Override
+    public Job read(Object id) throws ItemNotFound {
+      for(Job job : jobs) {
+        if (job.getId().equals(id))
+          return job;
+      }
+      throw new ItemNotFound();
+    }
+
+    @Override
+    public List<Job> readAll(FilteringStrategy filteringStrategy) {
+      return jobs;
+    }
+
+    @Override
+    public Job update(Job newObject, String id) throws ItemNotFound {
+      return null;
+    }
+
+    @Override
+    public void delete(Object resourceId) throws ItemNotFound {
+
+    }
+
+    public List<Job> getJobs() {
+      return jobs;
+    }
+
+    public void setJobs(List<Job> jobs) {
+      this.jobs = jobs;
+    }
+  }
+
+  public static class MockOperationHandleResourceManager implements IOperationHandleResourceManager {
+    private HashMap<String, StoredOperationHandle> storage = new HashMap<String, StoredOperationHandle>();
+
+    public MockOperationHandleResourceManager() {
+
+    }
+
+    @Override
+    public List<StoredOperationHandle> readJobRelatedHandles(Job job) {
+      LinkedList<StoredOperationHandle> storedOperationHandles = new LinkedList<StoredOperationHandle>();
+      StoredOperationHandle operationHandle = storage.get(job.getId());
+      if (operationHandle != null)
+        storedOperationHandles.add(operationHandle);
+      return storedOperationHandles;
+    }
+
+    @Override
+    public void putHandleForJob(TOperationHandle h, Job job) {
+
+    }
+
+    @Override
+    public boolean containsHandleForJob(Job job) {
+      return false;
+    }
+
+    @Override
+    public TOperationHandle getHandleForJob(Job job) throws ItemNotFound {
+      List<StoredOperationHandle> handles = readJobRelatedHandles(job);
+      if (handles.size() == 0)
+        throw new ItemNotFound();
+      return handles.get(0).toTOperationHandle();
+    }
+
+    @Override
+    public StoredOperationHandle create(StoredOperationHandle object) {
+      return null;
+    }
+
+    @Override
+    public StoredOperationHandle read(Object id) throws ItemNotFound {
+      return null;
+    }
+
+    @Override
+    public List<StoredOperationHandle> readAll(FilteringStrategy filteringStrategy) {
+      LinkedList<StoredOperationHandle> storedOperationHandles = new LinkedList<StoredOperationHandle>();
+      for (StoredOperationHandle handle : storage.values()) {
+        if (filteringStrategy.isConform(handle))
+          storedOperationHandles.add(handle);
+      }
+      return storedOperationHandles;
+    }
+
+    @Override
+    public StoredOperationHandle update(StoredOperationHandle newObject, String id) throws ItemNotFound {
+      return null;
+    }
+
+    @Override
+    public void delete(Object resourceId) throws ItemNotFound {
+
+    }
+
+    public HashMap<String, StoredOperationHandle> getStorage() {
+      return storage;
+    }
+
+    public void setStorage(HashMap<String, StoredOperationHandle> storage) {
+      this.storage = storage;
+    }
+  }
+
+  public static class MockATSParser implements IATSParser {
+
+    private List<HiveQueryId> hiveQueryIds = new LinkedList<HiveQueryId>();
+
+    public MockATSParser() {
+    }
+
+    @Override
+    public List<HiveQueryId> getHiveQuieryIdsList(String username) {
+      return hiveQueryIds;
+    }
+
+    @Override
+    public HiveQueryId getHiveQuieryIdByOperationId(byte[] guid) {
+      return null;
+    }
+
+    @Override
+    public TezDagId getTezDAGByName(String name) {
+      return null;
+    }
+
+    public List<HiveQueryId> getHiveQueryIds() {
+      return hiveQueryIds;
+    }
+
+    public void setHiveQueryIds(List<HiveQueryId> hiveQueryIds) {
+      this.hiveQueryIds = hiveQueryIds;
+    }
+  }
+}
\ No newline at end of file
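
Editor's note: the Aggregator tests above hinge on a pair of helpers that translate between the hex GUID stored with a Thrift operation handle and the URL-safe Base64 operationId reported by ATS; testReadJobBothATSAndView merges the view record and the ATS record by exactly that equivalence, and the last two tests pin the conversion down to "1a1b" <-> "Ghs". Below is a standalone sketch of that conversion only, using java.util.Base64 (Java 8+); it is not the Aggregator code from this patch, and the class and method names are illustrative.

import java.util.Base64;

public class OperationIdCodecSketch {

  // "1a1b" -> bytes {0x1A, 0x1B} -> unpadded URL-safe Base64 "Ghs"
  static String hexStringToUrlSafeBase64(String hex) {
    byte[] bytes = new byte[hex.length() / 2];
    for (int i = 0; i < bytes.length; i++) {
      bytes[i] = (byte) Integer.parseInt(hex.substring(2 * i, 2 * i + 2), 16);
    }
    return Base64.getUrlEncoder().withoutPadding().encodeToString(bytes);
  }

  // "Ghs" -> bytes {0x1A, 0x1B} -> "1a1b"
  static String urlSafeBase64ToHexString(String base64) {
    byte[] bytes = Base64.getUrlDecoder().decode(base64);
    StringBuilder hex = new StringBuilder();
    for (byte b : bytes) {
      hex.append(String.format("%02x", b));
    }
    return hex.toString();
  }

  public static void main(String[] args) {
    System.out.println(hexStringToUrlSafeBase64("1a1b")); // Ghs
    System.out.println(urlSafeBase64ToHexString("Ghs"));  // 1a1b
  }
}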

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobServiceTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobServiceTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobServiceTest.java
index 78b6f1f..1c3444e 100644
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobServiceTest.java
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobServiceTest.java
@@ -20,9 +20,9 @@ package org.apache.ambari.view.hive.resources.jobs;
 
 import org.apache.ambari.view.hive.ServiceTestUtils;
 import org.apache.ambari.view.hive.BaseHiveTest;
+import org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl;
 import org.apache.ambari.view.hive.utils.HdfsApiMock;
 import org.apache.ambari.view.hive.client.Connection;
-import org.apache.ambari.view.hive.client.ConnectionPool;
 import org.apache.ambari.view.hive.client.HiveClientException;
 import org.apache.ambari.view.hive.resources.savedQueries.SavedQuery;
 import org.apache.ambari.view.hive.resources.savedQueries.SavedQueryService;
@@ -53,7 +53,12 @@ public class JobServiceTest extends BaseHiveTest {
   @AfterClass
   public static void shutDown() throws Exception {
     BaseHiveTest.shutDown(); // super
-    HdfsApi.dropAllConnections(); //cleanup API connection
+  }
+
+  @Override
+  @After
+  public void tearDown() throws Exception {
+    jobService.getSharedObjectsFactory().clear(HdfsApi.class);
   }
 
   @Override
@@ -65,7 +70,7 @@ public class JobServiceTest extends BaseHiveTest {
 
     Connection hiveConnection = configureHiveConnectionMock();
 
-    ConnectionPool.setInstance(context, hiveConnection);
+    jobService.getSharedObjectsFactory().setInstance(Connection.class, hiveConnection);
   }
 
   @Test
@@ -77,7 +82,6 @@ public class JobServiceTest extends BaseHiveTest {
     jobCreationRequest.job = new JobImpl();
     jobCreationRequest.job.setQueryId(savedQueryForJob.getId());
 
-
     Response response = jobService.create(jobCreationRequest,
         ServiceTestUtils.getResponseWithLocation(), ServiceTestUtils.getDefaultUriInfo());
     ServiceTestUtils.assertHTTPResponseCreated(response);
@@ -113,7 +117,7 @@ public class JobServiceTest extends BaseHiveTest {
   public void createJobNoSource() throws IOException, InterruptedException {
     HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
     expect(hdfsApi.mkdir(anyString())).andReturn(true).anyTimes();
-    HdfsApi.setInstance(context, hdfsApi);
+    jobService.getSharedObjectsFactory().setInstance(HdfsApi.class, hdfsApi);
     replay(hdfsApi);
 
     JobService.JobRequest request = new JobService.JobRequest();
@@ -196,7 +200,7 @@ public class JobServiceTest extends BaseHiveTest {
   private HdfsApiMock setupHdfsApiMock() throws IOException, InterruptedException {
     HdfsApiMock hdfsApiMock = new HdfsApiMock("select * from Z");
     HdfsApi hdfsApi = hdfsApiMock.getHdfsApi();
-    HdfsApi.setInstance(context, hdfsApi);
+    jobService.getSharedObjectsFactory().setInstance(HdfsApi.class, hdfsApi);
     replay(hdfsApi);
     return hdfsApiMock;
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/LogParserTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/LogParserTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/LogParserTest.java
index 0c060ed..1e04dd7 100644
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/LogParserTest.java
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/LogParserTest.java
@@ -45,8 +45,8 @@ public class LogParserTest {
             "INFO : Ended Job = job_1421248330903_0003";
 
         LogParser p = LogParser.parseLog(log);
-        Assert.assertEquals(1, p.getJobsList().size());
-        Assert.assertEquals("application_1421248330903_0003",(((LogParser.JobId) (p.getJobsList().toArray())[0])
+        Assert.assertEquals(1, p.getAppsList().size());
+        Assert.assertEquals("application_1421248330903_0003",(((LogParser.AppId) (p.getAppsList().toArray())[0])
                                                             .getIdentifier()));
     }
 
@@ -66,8 +66,8 @@ public class LogParserTest {
             "INFO : Map 1: 1/1 Reducer 2: 1/1 ";
 
         LogParser p = LogParser.parseLog(log);
-        Assert.assertEquals(1, p.getJobsList().size());
-        Assert.assertEquals("application_1423156117563_0003",(((LogParser.JobId) (p.getJobsList().toArray())[0])
+        Assert.assertEquals(1, p.getAppsList().size());
+        Assert.assertEquals("application_1423156117563_0003",(((LogParser.AppId) (p.getAppsList().toArray())[0])
             .getIdentifier()));
     }
 }
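
Editor's note: the renamed assertions above describe what the reworked parser now returns: a list of YARN application IDs, found either directly (Tez logs print application_<timestamp>_<seq>) or via a MapReduce job ID (the first test's log mentions job_1421248330903_0003 while the expected identifier is application_1421248330903_0003). The sketch below is a hedged, standalone illustration of that matching rule only; it is not the LogParser/AppId code from the main sources, and all names are illustrative.

import java.util.LinkedHashSet;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class AppIdExtractionSketch {
  // Matches both "application_<ts>_<seq>" and "job_<ts>_<seq>"
  private static final Pattern ID = Pattern.compile("(application|job)_(\\d+_\\d+)");

  public static Set<String> extractAppIds(String log) {
    Set<String> ids = new LinkedHashSet<String>(); // de-duplicates repeated mentions
    Matcher m = ID.matcher(log);
    while (m.find()) {
      // Normalize MapReduce job IDs to the corresponding application ID
      ids.add("application_" + m.group(2));
    }
    return ids;
  }

  public static void main(String[] args) {
    System.out.println(extractAppIds("INFO : Ended Job = job_1421248330903_0003"));
    // prints [application_1421248330903_0003]
  }
}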

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/resources/FileResourceServiceTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/resources/FileResourceServiceTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/resources/FileResourceServiceTest.java
index 06d5269..026acc3 100644
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/resources/FileResourceServiceTest.java
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/resources/FileResourceServiceTest.java
@@ -86,7 +86,7 @@ public class FileResourceServiceTest extends BaseHiveTest {
   @Test
   public void updateFileResourceItem() {
     Response createdFileResourceItem = doCreateFileResourceItem();
-    Integer createdUdfId = ((FileResourceItem) ((JSONObject) createdFileResourceItem.getEntity()).get("fileResource")).getId();
+    Object createdUdfId = ((FileResourceItem) ((JSONObject) createdFileResourceItem.getEntity()).get("fileResource")).getId();
 
     FileResourceService.ResourceRequest request = new FileResourceService.ResourceRequest();
     request.fileResource = new FileResourceItem();
@@ -108,7 +108,7 @@ public class FileResourceServiceTest extends BaseHiveTest {
   @Test
   public void deleteFileResourceItem() {
     Response createdFileResourceItem = doCreateFileResourceItem();
-    Integer createdUdfId = ((FileResourceItem) ((JSONObject) createdFileResourceItem.getEntity()).get("fileResource")).getId();
+    Object createdUdfId = ((FileResourceItem) ((JSONObject) createdFileResourceItem.getEntity()).get("fileResource")).getId();
 
     Response response = resourceService.delete(String.valueOf(createdUdfId));
     Assert.assertEquals(204, response.getStatus());

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryServiceTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryServiceTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryServiceTest.java
index d369bb2..9b26a5b 100644
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryServiceTest.java
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryServiceTest.java
@@ -21,6 +21,7 @@ package org.apache.ambari.view.hive.resources.savedQueries;
 import org.apache.ambari.view.hive.HDFSTest;
 import org.apache.ambari.view.hive.utils.HdfsApi;
 import org.apache.ambari.view.hive.utils.NotFoundFormattedException;
+import org.apache.ambari.view.hive.utils.SharedObjectsFactory;
 import org.json.simple.JSONObject;
 import org.junit.*;
 import org.junit.rules.ExpectedException;
@@ -43,13 +44,12 @@ public class SavedQueryServiceTest extends HDFSTest {
 
   @BeforeClass
   public static void startUp() throws Exception {
-      HDFSTest.startUp(); // super
+    HDFSTest.startUp(); // super
   }
 
   @AfterClass
   public static void shutDown() throws Exception {
     HDFSTest.shutDown(); // super
-    HdfsApi.dropAllConnections(); //cleanup API connection
   }
 
   @Override
@@ -57,7 +57,13 @@ public class SavedQueryServiceTest extends HDFSTest {
   public void setUp() throws Exception {
     super.setUp();
     savedQueryService = getService(SavedQueryService.class, handler, context);
-    SavedQueryResourceManager.getViewSingletonObjects().clear();
+    savedQueryService.getSharedObjectsFactory().clear();
+  }
+
+  @Override
+  @After
+  public void tearDown() throws Exception {
+    super.tearDown();
   }
 
   @Override
@@ -124,7 +130,7 @@ public class SavedQueryServiceTest extends HDFSTest {
   @Test
   public void update() {
     Response created = doCreateSavedQuery();
-    Integer createdId = ((SavedQuery) ((JSONObject) created.getEntity()).get("savedQuery")).getId();
+    Object createdId = ((SavedQuery) ((JSONObject) created.getEntity()).get("savedQuery")).getId();
 
     SavedQueryService.SavedQueryRequest request = new SavedQueryService.SavedQueryRequest();
     request.savedQuery = new SavedQuery();
@@ -144,7 +150,7 @@ public class SavedQueryServiceTest extends HDFSTest {
   @Test
   public void delete() {
     Response created = doCreateSavedQuery();
-    Integer createdId = ((SavedQuery) ((JSONObject) created.getEntity()).get("savedQuery")).getId();
+    Object createdId = ((SavedQuery) ((JSONObject) created.getEntity()).get("savedQuery")).getId();
 
     Response response = savedQueryService.delete(String.valueOf(createdId));
     Assert.assertEquals(204, response.getStatus());

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/udfs/UDFServiceTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/udfs/UDFServiceTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/udfs/UDFServiceTest.java
index 9271e97..c8b70a8 100644
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/udfs/UDFServiceTest.java
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/udfs/UDFServiceTest.java
@@ -86,7 +86,7 @@ public class UDFServiceTest extends BaseHiveTest {
   @Test
   public void updateUDF() {
     Response createdUDF = doCreateUDF();
-    Integer createdUdfId = ((UDF) ((JSONObject) createdUDF.getEntity()).get("udf")).getId();
+    Object createdUdfId = ((UDF) ((JSONObject) createdUDF.getEntity()).get("udf")).getId();
 
     UDFService.UDFRequest request = new UDFService.UDFRequest();
     request.udf = new UDF();
@@ -108,7 +108,7 @@ public class UDFServiceTest extends BaseHiveTest {
   @Test
   public void deleteUDF() {
     Response createdUDF = doCreateUDF();
-    Integer createdUdfId = ((UDF) ((JSONObject) createdUDF.getEntity()).get("udf")).getId();
+    Object createdUdfId = ((UDF) ((JSONObject) createdUDF.getEntity()).get("udf")).getId();
 
     Response response = udfService.delete(String.valueOf(createdUdfId));
     Assert.assertEquals(204, response.getStatus());


[5/6] ambari git commit: AMBARI-10035. Hive View: Retrieve history from ATS (alexantonenko)

Posted by al...@apache.org.
AMBARI-10035. Hive View: Retrieve history from ATS (alexantonenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/db973127
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/db973127
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/db973127

Branch: refs/heads/trunk
Commit: db97312728bf9c04b3ec7711593fecd04a7554d1
Parents: d786be4
Author: Alex Antonenko <hi...@gmail.com>
Authored: Thu Mar 12 18:59:36 2015 +0200
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Thu Mar 12 21:31:20 2015 +0200

----------------------------------------------------------------------
 .../assets/javascripts/modernizr-2.6.2.min.js   |   0
 contrib/views/files/src/main/resources/view.xml |  13 +-
 contrib/views/hive/pom.xml                      |   2 +-
 .../apache/ambari/view/hive/BaseService.java    |  20 +-
 .../apache/ambari/view/hive/HelpService.java    |   2 +-
 .../org/apache/ambari/view/hive/TestBean.java   |   6 +-
 .../ambari/view/hive/client/Connection.java     |  24 +-
 .../view/hive/client/ConnectionFactory.java     |  72 ++++
 .../ambari/view/hive/client/ConnectionPool.java |  81 ----
 .../view/hive/client/IConnectionFactory.java    |  24 ++
 .../view/hive/persistence/DataStoreStorage.java |  32 +-
 .../view/hive/persistence/IStorageFactory.java  |  23 ++
 .../view/hive/persistence/KeyValueStorage.java  |  15 +-
 .../ambari/view/hive/persistence/Storage.java   |   6 +-
 .../persistence/utils/FilteringStrategy.java    |   1 +
 .../view/hive/persistence/utils/Indexed.java    |   4 +-
 .../utils/OnlyOwnersFilteringStrategy.java      |   5 +
 .../hive/persistence/utils/StorageFactory.java  |  69 ++++
 .../hive/persistence/utils/StorageUtil.java     |  94 -----
 .../hive/resources/CRUDResourceManager.java     |  42 +-
 .../view/hive/resources/IResourceManager.java   |  37 ++
 .../resources/PersonalCRUDResourceManager.java  |  20 +-
 .../resources/SharedCRUDResourceManager.java    |  12 +-
 .../resources/browser/HiveBrowserService.java   |  24 +-
 .../view/hive/resources/files/FileService.java  |  47 ++-
 .../view/hive/resources/jobs/Aggregator.java    | 210 ++++++++++
 .../resources/jobs/ConnectionController.java    |  21 +-
 .../jobs/IOperationHandleResourceManager.java   |  36 ++
 .../ambari/view/hive/resources/jobs/Job.java    |  90 ----
 .../view/hive/resources/jobs/JobController.java |  46 ---
 .../resources/jobs/JobControllerFactory.java    |  43 --
 .../hive/resources/jobs/JobControllerImpl.java  | 326 ---------------
 .../view/hive/resources/jobs/JobImpl.java       | 203 ---------
 .../hive/resources/jobs/JobResourceManager.java | 107 -----
 .../resources/jobs/JobResourceProvider.java     |  12 +-
 .../view/hive/resources/jobs/JobService.java    |  37 +-
 .../view/hive/resources/jobs/LogParser.java     |  79 ++--
 .../jobs/OperationHandleController.java         |  22 +-
 .../jobs/OperationHandleControllerFactory.java  |  25 +-
 .../jobs/OperationHandleResourceManager.java    |  41 +-
 .../resources/jobs/StoredOperationHandle.java   |  12 +-
 .../hive/resources/jobs/atsJobs/ATSParser.java  | 139 +++++++
 .../jobs/atsJobs/ATSParserFactory.java          |  42 ++
 .../jobs/atsJobs/ATSRequestsDelegate.java       |  29 ++
 .../jobs/atsJobs/ATSRequestsDelegateImpl.java   |  86 ++++
 .../resources/jobs/atsJobs/HiveQueryId.java     |  37 ++
 .../hive/resources/jobs/atsJobs/IATSParser.java |  29 ++
 .../hive/resources/jobs/atsJobs/TezDagId.java   |  26 ++
 .../jobs/viewJobs/IJobControllerFactory.java    |  23 ++
 .../view/hive/resources/jobs/viewJobs/Job.java  |  99 +++++
 .../resources/jobs/viewJobs/JobController.java  |  46 +++
 .../jobs/viewJobs/JobControllerFactory.java     |  42 ++
 .../jobs/viewJobs/JobControllerImpl.java        | 343 ++++++++++++++++
 .../hive/resources/jobs/viewJobs/JobImpl.java   | 226 ++++++++++
 .../jobs/viewJobs/JobResourceManager.java       | 107 +++++
 .../resources/resources/FileResourceItem.java   |   6 +-
 .../resources/FileResourceResourceManager.java  |   9 +-
 .../resources/FileResourceResourceProvider.java |  10 +-
 .../resources/FileResourceService.java          |  11 +-
 .../hive/resources/savedQueries/SavedQuery.java |   6 +-
 .../savedQueries/SavedQueryResourceManager.java |  42 +-
 .../SavedQueryResourceProvider.java             |  18 +-
 .../savedQueries/SavedQueryService.java         |  10 +-
 .../ambari/view/hive/resources/udfs/UDF.java    |   6 +-
 .../hive/resources/udfs/UDFResourceManager.java |   9 +-
 .../resources/udfs/UDFResourceProvider.java     |  11 +-
 .../view/hive/resources/udfs/UDFService.java    |  15 +-
 .../ambari/view/hive/utils/FilePaginator.java   |   6 +-
 .../apache/ambari/view/hive/utils/HdfsApi.java  |  45 +-
 .../apache/ambari/view/hive/utils/HdfsUtil.java |  17 +-
 .../view/hive/utils/SharedObjectsFactory.java   | 163 ++++++++
 .../hive-web/app/components/typeahead-widget.js |   2 +-
 .../ui/hive-web/app/controllers/history.js      |   9 +-
 .../ui/hive-web/app/controllers/index.js        |  56 ++-
 .../app/controllers/index/history-query/logs.js |  12 +-
 .../controllers/index/history-query/results.js  |   5 +-
 .../ui/hive-web/app/controllers/job.js          |   5 +-
 .../ui/hive-web/app/controllers/open-queries.js |  13 +-
 .../ui/hive-web/app/controllers/queries.js      |   5 +-
 .../ui/hive-web/app/controllers/settings.js     | 129 +++---
 .../ui/hive-web/app/controllers/tez-ui.js       |  22 +
 .../hive-web/app/controllers/visual-explain.js  |  22 +
 .../ui/hive-web/app/helpers/all-uppercase.js    |  25 ++
 .../ui/hive-web/app/initializers/i18n.js        |   7 +-
 .../resources/ui/hive-web/app/models/job.js     |  14 +-
 .../ui/hive-web/app/routes/application.js       |   4 +-
 .../app/routes/index/history-query/index.js     |  13 +-
 .../resources/ui/hive-web/app/styles/app.scss   | 100 +++--
 .../templates/components/_typeahead-widget.hbs  |  17 +
 .../app/templates/components/popover-widget.hbs |   2 +-
 .../app/templates/databases-search-results.hbs  |   2 +-
 .../ui/hive-web/app/templates/databases.hbs     |   4 +-
 .../ui/hive-web/app/templates/history.hbs       |   4 +-
 .../ui/hive-web/app/templates/index.hbs         | 124 +++---
 .../ui/hive-web/app/templates/queries.hbs       |  58 +--
 .../ui/hive-web/app/templates/settings.hbs      |  63 +--
 .../ui/hive-web/app/templates/tez-ui.hbs        |  22 +
 .../hive-web/app/templates/visual-explain.hbs   |  22 +
 .../ui/hive-web/app/utils/constants.js          |  87 +++-
 .../ui/hive-web/app/utils/functions.js          |  19 +-
 .../resources/ui/hive-web/app/views/tez-ui.js   |  35 ++
 .../ui/hive-web/app/views/visual-explain.js     |  35 ++
 .../src/main/resources/ui/hive-web/bower.json   |   8 +-
 .../src/main/resources/ui/hive-web/package.json |  12 +-
 .../tests/integration/query-editor-test.js      |   2 +-
 .../tests/unit/controllers/history-test.js      |   4 +-
 .../tests/unit/controllers/queries-test.js      |  15 -
 .../tests/unit/controllers/settings-test.js     |  97 +++++
 .../tests/unit/controllers/tez-ui-test.js       |  33 ++
 .../unit/controllers/visual-explain-test.js     |  33 ++
 .../tests/unit/views/visual-explain-test.js     |  30 ++
 contrib/views/hive/src/main/resources/view.xml  |  52 ++-
 .../hive/resources/files/FileServiceTest.java   |   7 +-
 .../view/hive/resources/jobs/ATSParserTest.java | 411 +++++++++++++++++++
 .../hive/resources/jobs/AggregatorTest.java     | 396 ++++++++++++++++++
 .../hive/resources/jobs/JobServiceTest.java     |  16 +-
 .../view/hive/resources/jobs/LogParserTest.java |   8 +-
 .../resources/FileResourceServiceTest.java      |   4 +-
 .../savedQueries/SavedQueryServiceTest.java     |  16 +-
 .../hive/resources/udfs/UDFServiceTest.java     |   4 +-
 120 files changed, 4083 insertions(+), 1645 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/files/src/main/resources/ui/app/assets/javascripts/modernizr-2.6.2.min.js
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/resources/ui/app/assets/javascripts/modernizr-2.6.2.min.js b/contrib/views/files/src/main/resources/ui/app/assets/javascripts/modernizr-2.6.2.min.js
old mode 100644
new mode 100755

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/files/src/main/resources/view.xml
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/resources/view.xml b/contrib/views/files/src/main/resources/view.xml
index 206128d..eaf1a02 100644
--- a/contrib/views/files/src/main/resources/view.xml
+++ b/contrib/views/files/src/main/resources/view.xml
@@ -19,19 +19,26 @@
     <label>Files</label>
     <version>0.1.0</version>
 
+    <min-ambari-version>1.7.*</min-ambari-version>
+
     <parameter>
         <name>webhdfs.url</name>
-        <description>WebHDFS FileSystem URI (example: webhdfs://namenode:50070)</description>
+        <description>Enter the WebHDFS FileSystem URI. Typically this is the dfs.namenode.http-address property in the hdfs-site.xml configuration. URL must be accessible from Ambari Server.</description>
+        <label>WebHDFS FileSystem URI</label>
+        <placeholder>webhdfs://namenode:50070</placeholder>
         <required>true</required>
     </parameter>
     <parameter>
         <name>webhdfs.username</name>
-        <description>doAs for proxy user for HDFS</description>
+        <description>doAs for proxy user for HDFS. By default, uses the currently logged-in Ambari user.</description>
+        <label>WebHDFS Username</label>
         <required>false</required>
     </parameter>
     <parameter>
         <name>webhdfs.auth</name>
-        <description>Semicolon-separated authentication configs. Default: auth=SIMPLE</description>
+        <description>Semicolon-separated authentication configs.</description>
+        <placeholder>auth=SIMPLE</placeholder>
+        <default-value>auth=SIMPLE</default-value>
         <required>false</required>
     </parameter>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hive/pom.xml b/contrib/views/hive/pom.xml
index 0ed6af4..e381719 100644
--- a/contrib/views/hive/pom.xml
+++ b/contrib/views/hive/pom.xml
@@ -195,7 +195,7 @@
         <artifactId>frontend-maven-plugin</artifactId>
         <version>0.0.14</version>
         <configuration>
-          <nodeVersion>v0.10.26</nodeVersion>
+          <nodeVersion>v0.10.32</nodeVersion>
           <npmVersion>1.4.3</npmVersion>
           <workingDirectory>src/main/resources/ui/hive-web/</workingDirectory>
         </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/BaseService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/BaseService.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/BaseService.java
index e28193d..a963975 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/BaseService.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/BaseService.java
@@ -20,9 +20,8 @@ package org.apache.ambari.view.hive;
 
 import com.google.inject.Inject;
 import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.hive.persistence.Storage;
-import org.apache.ambari.view.hive.persistence.utils.StorageUtil;
 import org.apache.ambari.view.hive.utils.HdfsApi;
+import org.apache.ambari.view.hive.utils.SharedObjectsFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -37,19 +36,16 @@ public class BaseService {
   protected final static Logger LOG =
       LoggerFactory.getLogger(BaseService.class);
 
-  private Storage storage;
-  protected Storage getStorage() {
-    if (storage == null) {
-      storage = StorageUtil.getInstance(context).getStorage();
+  private SharedObjectsFactory sharedObjectsFactory;
+  public SharedObjectsFactory getSharedObjectsFactory() {
+    if (sharedObjectsFactory == null) {
+      sharedObjectsFactory = new SharedObjectsFactory(context);
     }
-    return storage;
+    return sharedObjectsFactory;
   }
 
-  private HdfsApi hdfsApi = null;
-  protected HdfsApi getHdfsApi()  {
-    if (hdfsApi == null)
-      hdfsApi = HdfsApi.getInstance(context);
-    return hdfsApi;
+  public void setSharedObjectsFactory(SharedObjectsFactory sharedObjectsFactory) {
+    this.sharedObjectsFactory = sharedObjectsFactory;
   }
 
   public BaseService() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/HelpService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/HelpService.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/HelpService.java
index f975393..870e31a 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/HelpService.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/HelpService.java
@@ -66,7 +66,7 @@ public class HelpService extends BaseService {
   public Response testStorage(){
     TestBean test = new TestBean();
     test.someData = "hello world";
-    getStorage().store(TestBean.class, test);
+    getSharedObjectsFactory().getStorage().store(TestBean.class, test);
     return Response.ok("OK").build();
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/TestBean.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/TestBean.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/TestBean.java
index fce0177..d298931 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/TestBean.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/TestBean.java
@@ -22,15 +22,15 @@ import org.apache.ambari.view.hive.persistence.utils.Indexed;
 
 public class TestBean implements Indexed {
   public String someData;
-  public Integer id;
+  public String id;
 
   @Override
-  public Integer getId() {
+  public String getId() {
     return id;
   }
 
   @Override
-  public void setId(Integer id) {
+  public void setId(String id) {
     this.id = id;
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java
index e713aba..44c90ae 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java
@@ -18,7 +18,9 @@
 
 package org.apache.ambari.view.hive.client;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hive.service.auth.HiveAuthFactory;
 import org.apache.hive.service.auth.KerberosSaslHelper;
 import org.apache.hive.service.auth.PlainSaslHelper;
@@ -54,11 +56,13 @@ public class Connection {
   private TTransport transport;
 
   private DDLDelegator ddl;
+  private String username;
 
-  public Connection(String host, int port, Map<String, String> authParams) throws HiveClientException {
+  public Connection(String host, int port, Map<String, String> authParams, String username) throws HiveClientException {
     this.host = host;
     this.port = port;
     this.authParams = authParams;
+    this.username = username;
 
     openConnection();
     ddl = new DDLDelegator(this);
@@ -78,11 +82,11 @@ public class Connection {
           + host + ":" + port + ": " + e.toString(), e);
     }
     LOG.info("Hive connection opened");
-    openSession();
   }
 
   /**
    * Based on JDBC implementation of HiveConnection.createBinaryTransport
+   *
    * @return transport
    * @throws HiveClientException
    */
@@ -107,6 +111,11 @@ public class Connection {
           }
           saslProps.put(Sasl.QOP, saslQOP.toString());
           saslProps.put(Sasl.SERVER_AUTH, "true");
+
+          Configuration conf = new Configuration();
+          conf.set("hadoop.security.authentication", "kerberos");
+          UserGroupInformation.setConfiguration(conf);
+
           transport = KerberosSaslHelper.getKerberosTransport(
               authParams.get(Utils.HiveAuthenticationParams.AUTH_PRINCIPAL), host,
               HiveAuthFactory.getSocketTransport(host, port, 10000), saslProps,
@@ -119,7 +128,7 @@ public class Connection {
                 host, HiveAuthFactory.getSocketTransport(host, port, 10000), saslProps);
           } else {
             // we are using PLAIN Sasl connection with user/password
-            String userName = getAuthParamDefault(Utils.HiveAuthenticationParams.AUTH_USER, Utils.HiveAuthenticationParams.ANONYMOUS_USER);
+            String userName = getAuthParamDefault(Utils.HiveAuthenticationParams.AUTH_USER, getUsername());
             String passwd = getAuthParamDefault(Utils.HiveAuthenticationParams.AUTH_PASSWD, Utils.HiveAuthenticationParams.ANONYMOUS_USER);
             // Note: Thrift returns an SSL socket that is already bound to the specified host:port
             // Therefore an open called on this would be a no-op later
@@ -250,6 +259,7 @@ public class Connection {
         public TExecuteStatementResp body() throws HiveClientException {
 
           TExecuteStatementReq execReq = null;
+          openSession();
           execReq = new TExecuteStatementReq(getSessHandle(), oneCmd);
           execReq.setRunAsync(async);
           execReq.setConfOverlay(new HashMap<String, String>()); //maybe it's hive configuration? use it, Luke!
@@ -398,4 +408,12 @@ public class Connection {
   public void setAuthParams(Map<String, String> authParams) {
     this.authParams = authParams;
   }
+
+  public String getUsername() {
+    return username;
+  }
+
+  public void setUsername(String username) {
+    this.username = username;
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionFactory.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionFactory.java
new file mode 100644
index 0000000..6886f57
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionFactory.java
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.client;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive.utils.ServiceFormattedException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class ConnectionFactory implements IConnectionFactory {
+  private final static Logger LOG =
+      LoggerFactory.getLogger(ConnectionFactory.class);
+  private ViewContext context;
+
+  public ConnectionFactory(ViewContext context) {
+    this.context = context;
+  }
+
+  @Override
+  public Connection getHiveConnection() {
+    try {
+      return new Connection(getHiveHost(), Integer.valueOf(getHivePort()),
+          getHiveAuthParams(), context.getUsername());
+    } catch (HiveClientException e) {
+      throw new ServiceFormattedException("Couldn't open connection to Hive: " + e.toString(), e);
+    }
+  }
+
+  private String getHiveHost() {
+    return context.getProperties().get("hive.host");
+  }
+
+  private String getHivePort() {
+    return context.getProperties().get("hive.port");
+  }
+
+  private Map<String, String> getHiveAuthParams() {
+    String auth = context.getProperties().get("hive.auth");
+    Map<String, String> params = new HashMap<String, String>();
+    if (auth == null || auth.isEmpty()) {
+      auth = "auth=NOSASL";
+    }
+    for(String param : auth.split(";")) {
+      String[] keyvalue = param.split("=");
+      if (keyvalue.length != 2) {
+        LOG.error("Can not parse authentication param " + param + " in " + auth);
+        continue;
+      }
+      params.put(keyvalue[0], keyvalue[1]);
+    }
+    return params;
+  }
+}
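
Editor's note: ConnectionFactory replaces the per-instance ConnectionPool singleton removed below; it reads hive.host, hive.port and the semicolon-separated hive.auth property (defaulting to auth=NOSASL) and builds a Connection that carries the current user's name, which Connection uses as the default PLAIN SASL user. A minimal usage sketch, assuming only a ViewContext whose instance properties define those keys; how the view obtains its ViewContext is not shown here.

import org.apache.ambari.view.ViewContext;
import org.apache.ambari.view.hive.client.Connection;
import org.apache.ambari.view.hive.client.ConnectionFactory;
import org.apache.ambari.view.hive.client.IConnectionFactory;

public class ConnectionFactoryUsageSketch {
  public static Connection openForCurrentUser(ViewContext context) {
    IConnectionFactory factory = new ConnectionFactory(context);
    // Unlike the removed ConnectionPool singleton, each call builds a
    // Connection constructed with context.getUsername().
    return factory.getHiveConnection();
  }
}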

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionPool.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionPool.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionPool.java
deleted file mode 100644
index 6d07067..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionPool.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.client;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.hive.utils.ServiceFormattedException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.HashMap;
-import java.util.Map;
-
-public class ConnectionPool {
-  private final static Logger LOG =
-      LoggerFactory.getLogger(ConnectionPool.class);
-
-  private static Map<String, Connection> viewSingletonObjects = new HashMap<String, Connection>();
-  /**
-   * Returns HdfsApi object specific to instance
-   * @param context View Context instance
-   * @return Hdfs business delegate object
-   */
-  public static Connection getConnection(ViewContext context) {
-    if (!viewSingletonObjects.containsKey(context.getInstanceName()))
-      viewSingletonObjects.put(context.getInstanceName(), connectToHive(context));
-    return viewSingletonObjects.get(context.getInstanceName());
-  }
-
-  private static Connection connectToHive(ViewContext context) {
-    try {
-      return new Connection(getHiveHost(context), Integer.valueOf(getHivePort(context)), getHiveAuthParams(context));
-    } catch (HiveClientException e) {
-      throw new ServiceFormattedException("Couldn't open connection to Hive: " + e.toString(), e);
-    }
-  }
-
-  public static void setInstance(ViewContext context, Connection api) {
-    viewSingletonObjects.put(context.getInstanceName(), api);
-  }
-
-  private static String getHiveHost(ViewContext context) {
-    return context.getProperties().get("hive.host");
-  }
-
-  private static String getHivePort(ViewContext context) {
-    return context.getProperties().get("hive.port");
-  }
-
-  private static Map<String, String> getHiveAuthParams(ViewContext context) {
-    String auth = context.getProperties().get("hive.auth");
-    Map<String, String> params = new HashMap<String, String>();
-    if (auth == null || auth.isEmpty()) {
-      auth = "auth=NOSASL";
-    }
-    for(String param : auth.split(";")) {
-      String[] keyvalue = param.split("=");
-      if (keyvalue.length != 2) {
-        LOG.error("Can not parse authentication param " + param + " in " + auth);
-        continue;
-      }
-      params.put(keyvalue[0], keyvalue[1]);
-    }
-    return params;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/IConnectionFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/IConnectionFactory.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/IConnectionFactory.java
new file mode 100644
index 0000000..7a3df06
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/IConnectionFactory.java
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.client;
+
+
+public interface IConnectionFactory {
+  Connection getHiveConnection();
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/DataStoreStorage.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/DataStoreStorage.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/DataStoreStorage.java
index 7a4ff40..b4bc415 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/DataStoreStorage.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/DataStoreStorage.java
@@ -30,7 +30,6 @@ import org.slf4j.LoggerFactory;
 
 import javax.ws.rs.WebApplicationException;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.LinkedList;
 import java.util.List;
 
@@ -55,7 +54,7 @@ public class DataStoreStorage implements Storage {
   public synchronized void store(Class model, Indexed obj) {
     try {
       if (obj.getId() == null) {
-        int id = nextIdForEntity(context, model);
+        String id = nextIdForEntity(context, model);
         obj.setId(id);
       }
       context.getDataStore().store(obj);
@@ -64,7 +63,7 @@ public class DataStoreStorage implements Storage {
     }
   }
 
-  private static synchronized int nextIdForEntity(ViewContext context, Class aClass) {
+  private static synchronized String nextIdForEntity(ViewContext context, Class aClass) {
     // auto increment id implementation
     String lastId = context.getInstanceData(aClass.getName());
     int newId;
@@ -74,12 +73,12 @@ public class DataStoreStorage implements Storage {
       newId = Integer.parseInt(lastId) + 1;
     }
     context.putInstanceData(aClass.getName(), String.valueOf(newId));
-    return newId;
+    return String.valueOf(newId);
   }
 
   @Override
-  public synchronized <T extends Indexed> T load(Class<T> model, Integer id) throws ItemNotFound {
-    LOG.debug(String.format("Loading %s #%d", model.getName(), id));
+  public synchronized <T extends Indexed> T load(Class<T> model, Object id) throws ItemNotFound {
+    LOG.debug(String.format("Loading %s #%s", model.getName(), id));
     try {
       T obj = context.getDataStore().find(model, id);
       if (obj != null) {
@@ -97,10 +96,9 @@ public class DataStoreStorage implements Storage {
     LinkedList<T> list = new LinkedList<T>();
     LOG.debug(String.format("Loading all %s-s", model.getName()));
     try {
-      for(T item: context.getDataStore().findAll(model, null)) {
-        if ((filter == null) || filter.isConform(item)) {
-          list.add(item);
-        }
+      //TODO: use WHERE statement instead of this ugly filter
+      for(T item: context.getDataStore().findAll(model, filter.whereStatement())) {
+        list.add(item);
       }
     } catch (PersistenceException e) {
       throw new ServiceFormattedException("Error while finding all objects in DataStorage", e);
@@ -124,8 +122,8 @@ public class DataStoreStorage implements Storage {
   }
 
   @Override
-  public synchronized void delete(Class model, int id) throws ItemNotFound {
-    LOG.debug(String.format("Deleting %s:%d", model.getName(), id));
+  public synchronized void delete(Class model, Object id) throws ItemNotFound {
+    LOG.debug(String.format("Deleting %s:%s", model.getName(), id));
     Object obj = load(model, id);
     try {
       context.getDataStore().remove(obj);
@@ -135,7 +133,7 @@ public class DataStoreStorage implements Storage {
   }
 
   @Override
-  public boolean exists(Class model, Integer id) {
+  public boolean exists(Class model, Object id) {
     try {
       return context.getDataStore().find(model, id) != null;
     } catch (PersistenceException e) {
@@ -151,7 +149,7 @@ public class DataStoreStorage implements Storage {
       storage.store(SmokeTestEntity.class, entity);
 
       if (entity.getId() == null) throw new ServiceFormattedException("Ambari Views instance data DB doesn't work properly (auto increment id doesn't work)", null);
-      Integer id = entity.getId();
+      Object id = entity.getId();
       SmokeTestEntity entity2 = storage.load(SmokeTestEntity.class, id);
       boolean status = entity2.getData().compareTo("42") == 0;
       storage.delete(SmokeTestEntity.class, id);
@@ -164,14 +162,14 @@ public class DataStoreStorage implements Storage {
   }
 
   public static class SmokeTestEntity implements Indexed {
-    private Integer id = null;
+    private String id = null;
     private String data = null;
 
-    public Integer getId() {
+    public String getId() {
       return id;
     }
 
-    public void setId(Integer id) {
+    public void setId(String id) {
       this.id = id;
     }
 

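A minimal usage sketch of the String-id storage API shown above (illustration only, not part of this patch). The ViewContext argument and the SmokeTestEntity.setData() setter are assumptions; DataStoreStorage, store(), load() and delete() come from the diff:

import org.apache.ambari.view.ViewContext;
import org.apache.ambari.view.hive.persistence.DataStoreStorage;
import org.apache.ambari.view.hive.persistence.DataStoreStorage.SmokeTestEntity;

public class StringIdSketch {
  public static void smoke(ViewContext context) throws Exception {
    DataStoreStorage storage = new DataStoreStorage(context);

    SmokeTestEntity entity = new SmokeTestEntity();
    entity.setData("42");                          // setData() assumed to mirror getData()
    storage.store(SmokeTestEntity.class, entity);  // id is assigned as "1", "2", ... (now a String)

    String id = entity.getId();
    SmokeTestEntity loaded = storage.load(SmokeTestEntity.class, id);
    storage.delete(SmokeTestEntity.class, id);     // delete also accepts the String id
  }
}
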
http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/IStorageFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/IStorageFactory.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/IStorageFactory.java
new file mode 100644
index 0000000..298d4c8
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/IStorageFactory.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.persistence;
+
+public interface IStorageFactory {
+  Storage getStorage();
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/KeyValueStorage.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/KeyValueStorage.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/KeyValueStorage.java
index e8a904c..6e88063 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/KeyValueStorage.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/KeyValueStorage.java
@@ -62,7 +62,7 @@ public abstract class KeyValueStorage implements Storage {
       int lastIndex = getConfig().getInt(modelIndexingPropName, 0);
       lastIndex ++;
       getConfig().setProperty(modelIndexingPropName, lastIndex);
-      obj.setId(lastIndex);
+      obj.setId(String.valueOf(lastIndex));
     }
 
     String modelPropName = getItemPropertyName(model, obj.getId());
@@ -71,12 +71,13 @@ public abstract class KeyValueStorage implements Storage {
   }
 
   @Override
-  public <T extends Indexed> T load(Class<T> model, Integer id) throws ItemNotFound {
+  public <T extends Indexed> T load(Class<T> model, Object id) throws ItemNotFound {
     String modelPropName = getItemPropertyName(model, id);
     LOG.debug(String.format("Loading %s", modelPropName));
     if (getConfig().containsKey(modelPropName)) {
       String json = read(modelPropName);
       LOG.debug(String.format("json: %s", json));
+
       return deserialize(model, json);
     } else {
       throw new ItemNotFound();
@@ -141,14 +142,14 @@ public abstract class KeyValueStorage implements Storage {
   }
 
   @Override
-  public synchronized void delete(Class model, int id) {
-    LOG.debug(String.format("Deleting %s:%d", model.getName(), id));
+  public synchronized void delete(Class model, Object id) {
+    LOG.debug(String.format("Deleting %s:%s", model.getName(), id));
     String modelPropName = getItemPropertyName(model, id);
     clear(modelPropName);
   }
 
   @Override
-  public boolean exists(Class model, Integer id) {
+  public boolean exists(Class model, Object id) {
     return getConfig().containsKey(getItemPropertyName(model, id));
   }
 
@@ -156,7 +157,7 @@ public abstract class KeyValueStorage implements Storage {
     return String.format("%s:index", model.getName());
   }
 
-  private String getItemPropertyName(Class model, int id) {
-    return String.format("%s.%d", model.getName(), id);
+  private String getItemPropertyName(Class model, Object id) {
+    return String.format("%s.%s", model.getName(), id);
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/Storage.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/Storage.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/Storage.java
index fb75670..188282e 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/Storage.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/Storage.java
@@ -41,7 +41,7 @@ public interface Storage {
    * @return bean instance
    * @throws ItemNotFound thrown if item with id was not found in DB
    */
-  <T extends Indexed> T load(Class<T> model, Integer id) throws ItemNotFound;
+  <T extends Indexed> T load(Class<T> model, Object id) throws ItemNotFound;
 
   /**
    * Load all objects of given bean class
@@ -74,7 +74,7 @@ public interface Storage {
    * @param model bean class
    * @param id identifier
    */
-  void delete(Class model, int id) throws ItemNotFound;
+  void delete(Class model, Object id) throws ItemNotFound;
 
   /**
    * Check is object exists
@@ -82,5 +82,5 @@ public interface Storage {
    * @param id identifier
    * @return true if exists
    */
-  boolean exists(Class model, Integer id);
+  boolean exists(Class model, Object id);
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/FilteringStrategy.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/FilteringStrategy.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/FilteringStrategy.java
index 79aa2f9..eba572e 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/FilteringStrategy.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/FilteringStrategy.java
@@ -28,4 +28,5 @@ public interface FilteringStrategy {
    * @return true if item conforms this filter
    */
   boolean isConform(Indexed item);
+  String whereStatement();
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/Indexed.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/Indexed.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/Indexed.java
index 1d80920..82b7d57 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/Indexed.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/Indexed.java
@@ -26,11 +26,11 @@ public interface Indexed {
    * Get the ID
    * @return ID
    */
-  Integer getId();
+  String getId();
 
   /**
    * Set ID
    * @param id ID
    */
-  void setId(Integer id);
+  void setId(String id);
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/OnlyOwnersFilteringStrategy.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/OnlyOwnersFilteringStrategy.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/OnlyOwnersFilteringStrategy.java
index 9f2b4a2..620f440 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/OnlyOwnersFilteringStrategy.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/OnlyOwnersFilteringStrategy.java
@@ -30,4 +30,9 @@ public class OnlyOwnersFilteringStrategy implements FilteringStrategy {
     Owned object = (Owned) item;
     return object.getOwner().compareTo(username) == 0;
   }
+
+  @Override
+  public String whereStatement() {
+    return "owner = '" + username + "'";
+  }
 }

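A short sketch of the two ways the same strategy is now applied (illustration only, not part of this patch; the OwnerFilterSketch class name is made up, the rest comes from the diffs above):

import org.apache.ambari.view.hive.persistence.utils.FilteringStrategy;
import org.apache.ambari.view.hive.persistence.utils.Indexed;
import org.apache.ambari.view.hive.persistence.utils.OnlyOwnersFilteringStrategy;

public class OwnerFilterSketch {
  // 'item' must also implement Owned for isConform() to succeed.
  public static void show(Indexed item) {
    FilteringStrategy onlyMine = new OnlyOwnersFilteringStrategy("admin");
    String where = onlyMine.whereStatement();  // "owner = 'admin'" - pushed down by DataStoreStorage.loadAll()
    boolean mine = onlyMine.isConform(item);   // in-memory check, still used by KeyValueStorage.loadAll()
  }
}

Note that whereStatement() interpolates the username into the clause verbatim, so it relies on usernames being well-formed.
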
http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/StorageFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/StorageFactory.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/StorageFactory.java
new file mode 100644
index 0000000..88a6d66
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/StorageFactory.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.persistence.utils;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive.persistence.DataStoreStorage;
+import org.apache.ambari.view.hive.persistence.IStorageFactory;
+import org.apache.ambari.view.hive.persistence.LocalKeyValueStorage;
+import org.apache.ambari.view.hive.persistence.Storage;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Storage factory: creates either local (file-based) storage or Persistence API storage.
+ * The type depends on the view context configuration: if "dataworker.storagePath" is set,
+ * local storage is used; otherwise the Persistence API is used.
+ *
+ * Unlike the former StorageUtil singleton, a new storage instance is created on each call.
+ */
+public class StorageFactory implements IStorageFactory {
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(StorageFactory.class);
+
+  private ViewContext context;
+
+  /**
+   * Constructor of storage factory
+   * @param context View Context instance
+   */
+  public StorageFactory(ViewContext context) {
+    this.context = context;
+  }
+
+  /**
+   * Creates storage instance
+   * @return storage instance
+   */
+  public Storage getStorage() {
+    String fileName = context.getProperties().get("dataworker.storagePath");
+
+    Storage storageInstance;
+    if (fileName != null) {
+      LOG.debug("Using local storage in " + fileName + " to store data");
+      // If specified, use LocalKeyValueStorage - key-value file based storage
+      storageInstance = new LocalKeyValueStorage(context);
+    } else {
+      LOG.debug("Using Persistence API to store data");
+      // If not specified, use ambari-views Persistence API
+      storageInstance = new DataStoreStorage(context);
+    }
+    return storageInstance;
+  }
+}

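A minimal sketch of the selection logic above (illustration only, not part of this patch; a ViewContext is assumed to be available, and the "dataworker.storagePath" property name comes from the factory):

import org.apache.ambari.view.ViewContext;
import org.apache.ambari.view.hive.persistence.IStorageFactory;
import org.apache.ambari.view.hive.persistence.Storage;
import org.apache.ambari.view.hive.persistence.utils.StorageFactory;

public class StorageFactorySketch {
  public static Storage pickStorage(ViewContext context) {
    IStorageFactory factory = new StorageFactory(context);
    // LocalKeyValueStorage when "dataworker.storagePath" is set,
    // DataStoreStorage (Persistence API) otherwise.
    return factory.getStorage();
  }
}
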
http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/StorageUtil.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/StorageUtil.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/StorageUtil.java
deleted file mode 100644
index 1e67c02..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/StorageUtil.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.persistence.utils;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.hive.persistence.DataStoreStorage;
-import org.apache.ambari.view.hive.persistence.LocalKeyValueStorage;
-import org.apache.ambari.view.hive.persistence.Storage;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Storage factory, creates storage of Local or Persistence API type.
- * Type depends on context configuration: if "dataworker.storagePath" is set,
- * storage of Local type will be created.  Otherwise, Persistence API will be used.
- *
- * Storage is singleton.
- */
-public class StorageUtil {
-  private Storage storageInstance = null;
-
-  protected final static Logger LOG =
-      LoggerFactory.getLogger(StorageUtil.class);
-
-
-  private static Map<String, StorageUtil> viewSingletonObjects = new HashMap<String, StorageUtil>();
-  public static StorageUtil getInstance(ViewContext context) {
-    if (!viewSingletonObjects.containsKey(context.getInstanceName()))
-      viewSingletonObjects.put(context.getInstanceName(), new StorageUtil(context));
-    return viewSingletonObjects.get(context.getInstanceName());
-  }
-
-  public static void dropAllConnections() {
-    viewSingletonObjects.clear();
-  }
-
-  private ViewContext context;
-
-  /**
-   * Constructor of storage util
-   * @param context View Context instance
-   */
-  public StorageUtil(ViewContext context) {
-    this.context = context;
-  }
-
-  /**
-   * Get storage instance. If one is not created, creates instance.
-   * @return storage instance
-   */
-  public synchronized Storage getStorage() {
-    if (storageInstance == null) {
-      String fileName = context.getProperties().get("dataworker.storagePath");
-      if (fileName != null) {
-        LOG.debug("Using local storage in " + fileName + " to store data");
-        // If specifed, use LocalKeyValueStorage - key-value file based storage
-        storageInstance = new LocalKeyValueStorage(context);
-      } else {
-        LOG.debug("Using Persistence API to store data");
-        // If not specifed, use ambari-views Persistence API
-        storageInstance = new DataStoreStorage(context);
-      }
-    }
-    return storageInstance;
-  }
-
-  /**
-   * Set storage to use across all application.
-   * Used in unit tests.
-   * @param storage storage instance
-   */
-  public void setStorage(Storage storage) {
-    storageInstance = storage;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/CRUDResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/CRUDResourceManager.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/CRUDResourceManager.java
index b8dec17..28cf4d1 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/CRUDResourceManager.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/CRUDResourceManager.java
@@ -18,12 +18,11 @@
 
 package org.apache.ambari.view.hive.resources;
 
-import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive.persistence.IStorageFactory;
 import org.apache.ambari.view.hive.persistence.Storage;
 import org.apache.ambari.view.hive.persistence.utils.FilteringStrategy;
 import org.apache.ambari.view.hive.persistence.utils.Indexed;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.hive.persistence.utils.StorageUtil;
 import org.apache.ambari.view.hive.utils.ServiceFormattedException;
 
 import java.util.List;
@@ -32,18 +31,20 @@ import java.util.List;
  * CRUD resource manager
  * @param <T> Data type with ID
  */
-abstract public class CRUDResourceManager<T extends Indexed> {
+abstract public class CRUDResourceManager<T extends Indexed> implements IResourceManager<T> {
   //TODO: refactor: generic parameter gets Fabric for Indexed objects, not objects itself
   private Storage storage = null;
 
   protected final Class<? extends T> resourceClass;
+  protected IStorageFactory storageFabric;
 
   /**
    * Constructor
    * @param resourceClass model class
    */
-  public CRUDResourceManager(Class<? extends T> resourceClass) {
+  public CRUDResourceManager(Class<? extends T> resourceClass, IStorageFactory storageFabric) {
     this.resourceClass = resourceClass;
+    this.storageFabric = storageFabric;
   }
   // CRUD operations
 
@@ -52,7 +53,8 @@ abstract public class CRUDResourceManager<T extends Indexed> {
    * @param object object
    * @return model object
    */
-  protected T create(T object) {
+  @Override
+  public T create(T object) {
     object.setId(null);
     return this.save(object);
   }
@@ -63,9 +65,10 @@ abstract public class CRUDResourceManager<T extends Indexed> {
    * @return model object
    * @throws org.apache.ambari.view.hive.persistence.utils.ItemNotFound
    */
-  protected T read(Integer id) throws ItemNotFound {
+  @Override
+  public T read(Object id) throws ItemNotFound {
     T object = null;
-    object = getStorage().load(this.resourceClass, id);
+    object = storageFabric.getStorage().load(this.resourceClass, id);
     if (!checkPermissions(object))
       throw new ItemNotFound();
     return object;
@@ -76,8 +79,9 @@ abstract public class CRUDResourceManager<T extends Indexed> {
    * @param filteringStrategy filtering strategy
    * @return list of filtered objects
    */
-  protected List<T> readAll(FilteringStrategy filteringStrategy) {
-    return getStorage().loadAll(this.resourceClass, filteringStrategy);
+  @Override
+  public List<T> readAll(FilteringStrategy filteringStrategy) {
+    return storageFabric.getStorage().loadAll(this.resourceClass, filteringStrategy);
   }
 
   /**
@@ -87,7 +91,8 @@ abstract public class CRUDResourceManager<T extends Indexed> {
    * @return model object
    * @throws org.apache.ambari.view.hive.persistence.utils.ItemNotFound
    */
-  protected T update(T newObject, Integer id) throws ItemNotFound {
+  @Override
+  public T update(T newObject, String id) throws ItemNotFound {
     newObject.setId(id);
     this.save(newObject);
     return newObject;
@@ -98,29 +103,22 @@ abstract public class CRUDResourceManager<T extends Indexed> {
    * @param resourceId object identifier
    * @throws org.apache.ambari.view.hive.persistence.utils.ItemNotFound
    */
-  protected void delete(Integer resourceId) throws ItemNotFound {
-    if (!getStorage().exists(this.resourceClass, resourceId)) {
+  @Override
+  public void delete(Object resourceId) throws ItemNotFound {
+    if (!storageFabric.getStorage().exists(this.resourceClass, resourceId)) {
       throw new ItemNotFound();
     }
-    getStorage().delete(this.resourceClass, resourceId);
+    storageFabric.getStorage().delete(this.resourceClass, resourceId);
   }
 
   // UTILS
 
   protected T save(T object) {
-    getStorage().store(resourceClass, object);
+    storageFabric.getStorage().store(resourceClass, object);
     return object;
   }
 
-  protected Storage getStorage() {
-    if (storage == null) {
-      storage = StorageUtil.getInstance(getContext()).getStorage();
-    }
-    return storage;
-  }
-
   protected abstract boolean checkPermissions(T object);
-  protected abstract ViewContext getContext();
 
   protected void cleanupAfterErrorAndThrowAgain(Indexed object, ServiceFormattedException e) {
     try {

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/IResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/IResourceManager.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/IResourceManager.java
new file mode 100644
index 0000000..222d695
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/IResourceManager.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources;
+
+import org.apache.ambari.view.hive.persistence.utils.FilteringStrategy;
+import org.apache.ambari.view.hive.persistence.utils.Indexed;
+import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
+
+import java.util.List;
+
+public interface IResourceManager<T extends Indexed> {
+  T create(T object);
+
+  T read(Object id) throws ItemNotFound;
+
+  List<T> readAll(FilteringStrategy filteringStrategy);
+
+  T update(T newObject, String id) throws ItemNotFound;
+
+  void delete(Object resourceId) throws ItemNotFound;
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/PersonalCRUDResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/PersonalCRUDResourceManager.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/PersonalCRUDResourceManager.java
index d1223fb..15fcf22 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/PersonalCRUDResourceManager.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/PersonalCRUDResourceManager.java
@@ -19,6 +19,7 @@
 package org.apache.ambari.view.hive.resources;
 
 import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive.persistence.IStorageFactory;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive.persistence.utils.PersonalResource;
 import org.slf4j.Logger;
@@ -31,24 +32,24 @@ import java.util.concurrent.Callable;
  * @param <T> Data type with ID and Owner
  */
 public class PersonalCRUDResourceManager<T extends PersonalResource> extends CRUDResourceManager<T> {
-  protected ViewContext context;
   protected boolean ignorePermissions = false;
 
   private final static Logger LOG =
       LoggerFactory.getLogger(PersonalCRUDResourceManager.class);
+  protected ViewContext context;
+
   /**
    * Constructor
    * @param resourceClass model class
-   * @param context View Context instance
    */
-  public PersonalCRUDResourceManager(Class<? extends T> resourceClass, ViewContext context) {
-    super(resourceClass);
+  public PersonalCRUDResourceManager(Class<? extends T> resourceClass, IStorageFactory storageFabric, ViewContext context) {
+    super(resourceClass, storageFabric);
     this.context = context;
   }
 
   @Override
-  public T update(T newObject, Integer id) throws ItemNotFound {
-    T object = getStorage().load(this.resourceClass, id);
+  public T update(T newObject, String id) throws ItemNotFound {
+    T object = storageFabric.getStorage().load(this.resourceClass, id);
     if (object.getOwner().compareTo(this.context.getUsername()) != 0) {
       throw new ItemNotFound();
     }
@@ -74,18 +75,13 @@ public class PersonalCRUDResourceManager<T extends PersonalResource> extends CRU
     return object.getOwner().compareTo(this.context.getUsername()) == 0;
   }
 
-  @Override
-  public ViewContext getContext() {
-    return context;
-  }
-
   /**
    * Execute action ignoring objects owner
    * @param actions callable to execute
    * @return value returned from actions
    * @throws Exception
    */
-  public <T> T ignorePermissions(Callable<T> actions) throws Exception {
+  public T ignorePermissions(Callable<T> actions) throws Exception {
     ignorePermissions = true;
     T result;
     try {

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/SharedCRUDResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/SharedCRUDResourceManager.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/SharedCRUDResourceManager.java
index 0d2b297..9c4ca36 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/SharedCRUDResourceManager.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/SharedCRUDResourceManager.java
@@ -19,6 +19,7 @@
 package org.apache.ambari.view.hive.resources;
 
 import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive.persistence.IStorageFactory;
 import org.apache.ambari.view.hive.persistence.utils.Indexed;
 
 /**
@@ -31,20 +32,13 @@ public class SharedCRUDResourceManager<T extends Indexed> extends CRUDResourceMa
   /**
    * Constructor
    * @param responseClass model class
-   * @param context View Context instance
    */
-  public SharedCRUDResourceManager(Class<T> responseClass, ViewContext context) {
-    super(responseClass);
-    this.context = context;
+  public SharedCRUDResourceManager(Class<T> responseClass, IStorageFactory storageFabric) {
+    super(responseClass, storageFabric);
   }
 
   @Override
   protected boolean checkPermissions(T object) {
     return true; //everyone has permission
   }
-
-  @Override
-  protected ViewContext getContext() {
-    return context;
-  }
 }

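A sketch of the new wiring: resource managers now receive an IStorageFactory instead of resolving storage through a per-view singleton (illustration only, not part of this patch; SmokeTestEntity is reused merely as a convenient Indexed type, and the ViewContext is assumed):

import org.apache.ambari.view.ViewContext;
import org.apache.ambari.view.hive.persistence.DataStoreStorage.SmokeTestEntity;
import org.apache.ambari.view.hive.persistence.IStorageFactory;
import org.apache.ambari.view.hive.persistence.utils.StorageFactory;
import org.apache.ambari.view.hive.resources.SharedCRUDResourceManager;

public class ManagerWiringSketch {
  public static SharedCRUDResourceManager<SmokeTestEntity> wire(ViewContext context) {
    IStorageFactory storageFactory = new StorageFactory(context);
    // The manager asks the factory for storage on every operation,
    // so a test can pass a factory that returns a mock Storage instead.
    return new SharedCRUDResourceManager<SmokeTestEntity>(SmokeTestEntity.class, storageFactory);
  }
}
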
http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/browser/HiveBrowserService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/browser/HiveBrowserService.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/browser/HiveBrowserService.java
index e5983b9..bb1a0a2 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/browser/HiveBrowserService.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/browser/HiveBrowserService.java
@@ -21,13 +21,13 @@ package org.apache.ambari.view.hive.resources.browser;
 import com.google.inject.Inject;
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.ViewResourceHandler;
-import org.apache.ambari.view.hive.BaseService;
 import org.apache.ambari.view.hive.client.ColumnDescription;
-import org.apache.ambari.view.hive.client.ConnectionPool;
 import org.apache.ambari.view.hive.client.Cursor;
+import org.apache.ambari.view.hive.client.IConnectionFactory;
 import org.apache.ambari.view.hive.resources.jobs.ResultsPaginationController;
 import org.apache.ambari.view.hive.utils.BadRequestFormattedException;
 import org.apache.ambari.view.hive.utils.ServiceFormattedException;
+import org.apache.ambari.view.hive.utils.SharedObjectsFactory;
 import org.apache.commons.collections4.map.PassiveExpiringMap;
 import org.json.simple.JSONObject;
 import org.slf4j.Logger;
@@ -55,6 +55,8 @@ public class HiveBrowserService {
 
   private static final long EXPIRING_TIME = 10*60*1000;  // 10 minutes
   private static Map<String, Cursor> resultsCache;
+  private IConnectionFactory connectionFactory;
+
   public static Map<String, Cursor> getResultsCache() {
     if (resultsCache == null) {
       PassiveExpiringMap<String, Cursor> resultsCacheExpiringMap =
@@ -64,6 +66,12 @@ public class HiveBrowserService {
     return resultsCache;
   }
 
+  private IConnectionFactory getConnectionFactory() {
+    if (connectionFactory == null)
+      connectionFactory = new SharedObjectsFactory(context);
+    return connectionFactory;
+  }
+
   /**
    * Returns list of databases
    */
@@ -81,7 +89,7 @@ public class HiveBrowserService {
     String curl = null;
     try {
       JSONObject response = new JSONObject();
-      List<String> tables = ConnectionPool.getConnection(context).ddl().getDBList(like);
+      List<String> tables = getConnectionFactory().getHiveConnection().ddl().getDBList(like);
       response.put("databases", tables);
       return Response.ok(response).build();
     } catch (WebApplicationException ex) {
@@ -116,7 +124,7 @@ public class HiveBrowserService {
               new Callable<Cursor>() {
                 @Override
                 public Cursor call() throws Exception {
-                  return ConnectionPool.getConnection(context).ddl().getDBListCursor(finalLike);
+                  return getConnectionFactory().getHiveConnection().ddl().getDBListCursor(finalLike);
                 }
               }).build();
     } catch (WebApplicationException ex) {
@@ -146,7 +154,7 @@ public class HiveBrowserService {
     String curl = null;
     try {
       JSONObject response = new JSONObject();
-      List<String> tables = ConnectionPool.getConnection(context).ddl().getTableList(db, like);
+      List<String> tables = getConnectionFactory().getHiveConnection().ddl().getTableList(db, like);
       response.put("tables", tables);
       response.put("database", db);
       return Response.ok(response).build();
@@ -183,7 +191,7 @@ public class HiveBrowserService {
               new Callable<Cursor>() {
                 @Override
                 public Cursor call() throws Exception {
-                  Cursor cursor = ConnectionPool.getConnection(context).ddl().getTableListCursor(db, finalLike);
+                  Cursor cursor = getConnectionFactory().getHiveConnection().ddl().getTableListCursor(db, finalLike);
                   cursor.selectColumns(requestedColumns);
                   return cursor;
                 }
@@ -212,7 +220,7 @@ public class HiveBrowserService {
     String curl = null;
     try {
       JSONObject response = new JSONObject();
-      List<ColumnDescription> columnDescriptions = ConnectionPool.getConnection(context).ddl()
+      List<ColumnDescription> columnDescriptions = getConnectionFactory().getHiveConnection().ddl()
           .getTableDescription(db, table, like, extendedTableDescription);
       response.put("columns", columnDescriptions);
       response.put("database", db);
@@ -247,7 +255,7 @@ public class HiveBrowserService {
               new Callable<Cursor>() {
                 @Override
                 public Cursor call() throws Exception {
-                  Cursor cursor = ConnectionPool.getConnection(context).ddl().getTableDescriptionCursor(db, table, like);
+                  Cursor cursor = getConnectionFactory().getHiveConnection().ddl().getTableDescriptionCursor(db, table, like);
                   cursor.selectColumns(requestedColumns);
                   return cursor;
                 }

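A sketch of the connection path used above (illustration only, not part of this patch; the ViewContext is assumed, the "*" pattern is just an example, and getHiveConnection()/ddl()/getDBList() come from the diff):

import org.apache.ambari.view.ViewContext;
import org.apache.ambari.view.hive.client.IConnectionFactory;
import org.apache.ambari.view.hive.utils.SharedObjectsFactory;

import java.util.List;

public class BrowserConnectionSketch {
  public static List<String> listDatabases(ViewContext context) throws Exception {
    // Replaces the former static ConnectionPool.getConnection(context) lookup.
    IConnectionFactory connectionFactory = new SharedObjectsFactory(context);
    return connectionFactory.getHiveConnection().ddl().getDBList("*");
  }
}
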
http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/files/FileService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/files/FileService.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/files/FileService.java
index 8d886d5..3f5b3b8 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/files/FileService.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/files/FileService.java
@@ -23,6 +23,7 @@ import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.ViewResourceHandler;
 import org.apache.ambari.view.hive.BaseService;
 import org.apache.ambari.view.hive.utils.*;
+import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.json.simple.JSONObject;
@@ -51,6 +52,8 @@ import java.io.IOException;
  *      update file content
  */
 public class FileService extends BaseService {
+  public static final String FAKE_FILE = "fakefile://";
+
   @Inject
   ViewResourceHandler handler;
 
@@ -66,17 +69,23 @@ public class FileService extends BaseService {
   public Response getFilePage(@PathParam("filePath") String filePath, @QueryParam("page") Long page) throws IOException, InterruptedException {
     LOG.debug("Reading file " + filePath);
     try {
-      FilePaginator paginator = new FilePaginator(filePath, context);
+      FileResource file = new FileResource();
 
       if (page == null)
         page = 0L;
 
-      FileResource file = new FileResource();
-      file.setFilePath(filePath);
-      file.setFileContent(paginator.readPage(page));
-      file.setHasNext(paginator.pageCount() > page + 1);
-      file.setPage(page);
-      file.setPageCount(paginator.pageCount());
+      if (filePath.startsWith(FAKE_FILE)) {
+        if (page > 0)
+          throw new IllegalArgumentException("There's only one page in fake files");
+
+        String encodedContent = filePath.substring(FAKE_FILE.length());
+
+        fillFakeFileObject(filePath, file, encodedContent);
+      } else {
+        FilePaginator paginator = new FilePaginator(filePath, getSharedObjectsFactory().getHdfsApi());
+
+        fillRealFileObject(filePath, page, file, paginator);
+      }
 
       JSONObject object = new JSONObject();
       object.put("file", file);
@@ -92,6 +101,24 @@ public class FileService extends BaseService {
     }
   }
 
+  public void fillRealFileObject(String filePath, Long page, FileResource file, FilePaginator paginator) throws IOException, InterruptedException {
+    file.setFilePath(filePath);
+    file.setFileContent(paginator.readPage(page));
+    file.setHasNext(paginator.pageCount() > page + 1);
+    file.setPage(page);
+    file.setPageCount(paginator.pageCount());
+  }
+
+  public void fillFakeFileObject(String filePath, FileResource file, String encodedContent) {
+    String content = new String(Base64.decodeBase64(encodedContent));
+
+    file.setFilePath(filePath);
+    file.setFileContent(content);
+    file.setHasNext(false);
+    file.setPage(0);
+    file.setPageCount(1);
+  }
+
   /**
    * Delete single item
    */
@@ -100,7 +127,7 @@ public class FileService extends BaseService {
   public Response deleteFile(@PathParam("filePath") String filePath) throws IOException, InterruptedException {
     try {
       LOG.debug("Deleting file " + filePath);
-      if (getHdfsApi().delete(filePath, false)) {
+      if (getSharedObjectsFactory().getHdfsApi().delete(filePath, false)) {
         return Response.status(204).build();
       }
       throw new NotFoundFormattedException("FileSystem.delete returned false", null);
@@ -121,7 +148,7 @@ public class FileService extends BaseService {
                              @PathParam("filePath") String filePath) throws IOException, InterruptedException {
     try {
       LOG.debug("Rewriting file " + filePath);
-      FSDataOutputStream output = getHdfsApi().create(filePath, true);
+      FSDataOutputStream output = getSharedObjectsFactory().getHdfsApi().create(filePath, true);
       output.writeBytes(request.file.getFileContent());
       output.close();
       return Response.status(204).build();
@@ -143,7 +170,7 @@ public class FileService extends BaseService {
     try {
       LOG.debug("Creating file " + request.file.getFilePath());
       try {
-        FSDataOutputStream output = getHdfsApi().create(request.file.getFilePath(), false);
+        FSDataOutputStream output = getSharedObjectsFactory().getHdfsApi().create(request.file.getFilePath(), false);
         if (request.file.getFileContent() != null) {
           output.writeBytes(request.file.getFileContent());
         }

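A sketch of the fakefile:// convention handled above: the query text travels inside the path as URL-safe Base64 and is decoded back by fillFakeFileObject() (illustration only, not part of this patch; the sample query is made up):

import org.apache.commons.codec.binary.Base64;

public class FakeFileSketch {
  public static void main(String[] args) {
    String query = "SELECT * FROM sample_07";
    // How Aggregator builds the fake path for ATS-only jobs:
    String fakePath = "fakefile://" + Base64.encodeBase64URLSafeString(query.getBytes());
    // How FileService.getFilePage() recovers the content:
    String encoded = fakePath.substring("fakefile://".length());
    String restored = new String(Base64.decodeBase64(encoded));
    System.out.println(restored.equals(query));  // true
  }
}
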
http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/Aggregator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/Aggregator.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/Aggregator.java
new file mode 100644
index 0000000..ce82e15
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/Aggregator.java
@@ -0,0 +1,210 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs;
+
+import org.apache.ambari.view.hive.persistence.utils.FilteringStrategy;
+import org.apache.ambari.view.hive.persistence.utils.Indexed;
+import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive.persistence.utils.OnlyOwnersFilteringStrategy;
+import org.apache.ambari.view.hive.resources.IResourceManager;
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.HiveQueryId;
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.IATSParser;
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.TezDagId;
+import org.apache.ambari.view.hive.resources.jobs.viewJobs.Job;
+import org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl;
+import org.apache.commons.beanutils.PropertyUtils;
+import org.apache.commons.codec.binary.Base64;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.lang.reflect.InvocationTargetException;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Set;
+
+/**
+ * Aggregator of view jobs and ATS jobs.
+ * Not every view job has a corresponding ATS job.
+ */
+public class Aggregator {
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(Aggregator.class);
+
+  private final IATSParser ats;
+  private final IOperationHandleResourceManager operationHandleResourceManager;
+  private IResourceManager<Job> viewJobResourceManager;
+
+  public Aggregator(IResourceManager<Job> jobResourceManager,
+                    IOperationHandleResourceManager operationHandleResourceManager,
+                    IATSParser ats) {
+    this.viewJobResourceManager = jobResourceManager;
+    this.operationHandleResourceManager = operationHandleResourceManager;
+    this.ats = ats;
+  }
+
+  public List<Job> readAll(String username) {
+    Set<String> addedOperationIds = new HashSet<String>();
+
+    List<Job> allJobs = new LinkedList<Job>();
+    for (HiveQueryId atsHiveQuery : ats.getHiveQuieryIdsList(username)) {
+
+      TezDagId atsTezDag;
+      if (atsHiveQuery.dagNames != null && atsHiveQuery.dagNames.size() > 0) {
+        String dagName = atsHiveQuery.dagNames.get(0);
+
+        atsTezDag = ats.getTezDAGByName(dagName);
+      } else {
+        atsTezDag = new TezDagId();
+      }
+
+      JobImpl atsJob;
+      if (hasOperationId(atsHiveQuery)) {
+        try {
+          Job viewJob = getJobByOperationId(urlSafeBase64ToHexString(atsHiveQuery.operationId));
+          saveJobInfoIfNeeded(atsHiveQuery, atsTezDag, viewJob);
+
+          atsJob = mergeAtsJobWithViewJob(atsHiveQuery, atsTezDag, viewJob);
+        } catch (ItemNotFound itemNotFound) {
+          // Executed from HS2, but outside of Hive View
+          atsJob = atsOnlyJob(atsHiveQuery, atsTezDag);
+        }
+      } else {
+        atsJob = atsOnlyJob(atsHiveQuery, atsTezDag);
+      }
+      allJobs.add(atsJob);
+
+      addedOperationIds.add(atsHiveQuery.operationId);
+    }
+
+    // Cover the case when an operationId is present but has no record in ATS,
+    // e.g. optimized queries that do not launch jobs, like "SELECT * FROM TABLE"
+    for (Job job : viewJobResourceManager.readAll(new OnlyOwnersFilteringStrategy(username))) {
+      List<StoredOperationHandle> operationHandles = operationHandleResourceManager.readJobRelatedHandles(job);
+      assert operationHandles.size() <= 1;
+
+      if (operationHandles.size() > 0) {
+        StoredOperationHandle operationHandle = operationHandles.get(0);
+
+        if (!addedOperationIds.contains(hexStringToUrlSafeBase64(operationHandle.getGuid()))) {
+          //e.g. query without hadoop job: select * from table
+          allJobs.add(job);
+        }
+      }
+    }
+
+    return allJobs;
+  }
+
+  protected boolean hasOperationId(HiveQueryId atsHiveQuery) {
+    return atsHiveQuery.operationId != null;
+  }
+
+  protected JobImpl mergeAtsJobWithViewJob(HiveQueryId atsHiveQuery, TezDagId atsTezDag, Job viewJob) {
+    JobImpl atsJob;
+    try {
+      atsJob = new JobImpl(PropertyUtils.describe(viewJob));
+    }catch(IllegalAccessException e){
+      LOG.error("Can't instantiate JobImpl", e);
+      return null;
+    }catch(InvocationTargetException e){
+      LOG.error("Can't instantiate JobImpl", e);
+      return null;
+    }catch(NoSuchMethodException e){
+      LOG.error("Can't instantiate JobImpl", e);
+      return null;
+    }
+    fillAtsJobFields(atsJob, atsHiveQuery, atsTezDag);
+    return atsJob;
+  }
+
+  protected void saveJobInfoIfNeeded(HiveQueryId hiveQueryId, TezDagId tezDagId, Job viewJob) throws ItemNotFound {
+    if (viewJob.getDagName() == null) {
+      viewJob.setDagName(tezDagId.dagName);
+      viewJobResourceManager.update(viewJob, viewJob.getId());
+    }
+    if (!viewJob.getStatus().equals(tezDagId.status)) {
+      viewJob.setStatus(tezDagId.status);
+      viewJobResourceManager.update(viewJob, viewJob.getId());
+    }
+  }
+
+  protected JobImpl atsOnlyJob(HiveQueryId atsHiveQuery, TezDagId atsTezDag) {
+    JobImpl atsJob = new JobImpl();
+    atsJob.setId(atsHiveQuery.entity);
+    fillAtsJobFields(atsJob, atsHiveQuery, atsTezDag);
+
+    String query = atsHiveQuery.query;
+    atsJob.setTitle(query.substring(0, (query.length() > 42)?42:query.length()));
+
+    atsJob.setQueryFile("fakefile://" + Base64.encodeBase64URLSafeString(query.getBytes()));  // fake queryFile
+    return atsJob;
+  }
+
+  protected JobImpl fillAtsJobFields(JobImpl atsJob, HiveQueryId atsHiveQuery, TezDagId atsTezDag) {
+    atsJob.setApplicationId(atsTezDag.applicationId);
+
+    atsJob.setDagName(atsTezDag.dagName);
+    if (!atsTezDag.status.equals(TezDagId.STATUS_UNKNOWN))
+      atsJob.setStatus(atsTezDag.status);
+    if (atsHiveQuery.starttime != 0)
+      atsJob.setDateSubmitted(atsHiveQuery.starttime);
+    atsJob.setDuration(atsHiveQuery.duration);
+    return atsJob;
+  }
+
+  protected Job getJobByOperationId(final String opId) throws ItemNotFound {
+    List<StoredOperationHandle> operationHandles = operationHandleResourceManager.readAll(new FilteringStrategy() {
+      @Override
+      public boolean isConform(Indexed item) {
+        StoredOperationHandle opHandle = (StoredOperationHandle) item;
+        return opHandle.getGuid().equals(opId);
+      }
+
+      @Override
+      public String whereStatement() {
+        return "guid='" + opId + "'";
+      }
+    });
+
+    if (operationHandles.size() != 1)
+      throw new ItemNotFound();
+
+    return viewJobResourceManager.read(operationHandles.get(0).getJobId());
+  }
+
+  protected static String urlSafeBase64ToHexString(String urlsafeBase64){
+    byte[] decoded = Base64.decodeBase64(urlsafeBase64);
+
+    StringBuilder sb = new StringBuilder();
+    for(byte b : decoded){
+      sb.append(String.format("%02x", b));
+    }
+    return sb.toString();
+  }
+
+  protected static String hexStringToUrlSafeBase64(String hexString){
+    byte[] decoded = new byte[hexString.length() / 2];
+
+    for(int i=0; i<hexString.length(); i+=2) {
+       decoded[i / 2] = (byte) Integer.parseInt(String.format("%c%c", hexString.charAt(i), hexString.charAt(i+1)), 16);
+    }
+    return Base64.encodeBase64URLSafeString(decoded);
+  }
+}

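A short sketch of the two operation-id encodings bridged by the helpers above: HS2 operation handles are stored as hex GUIDs, while ATS reports the same id as URL-safe Base64 (illustration only, not part of this patch; the sample value is made up, and the class lives in the same package because the helpers are protected):

package org.apache.ambari.view.hive.resources.jobs;

public class OperationIdSketch {
  public static void main(String[] args) {
    String atsOperationId = "SGVsbG8";                                  // URL-safe Base64 of "Hello"
    String guid = Aggregator.urlSafeBase64ToHexString(atsOperationId);  // "48656c6c6f"
    String back = Aggregator.hexStringToUrlSafeBase64(guid);            // back to "SGVsbG8"
    System.out.println(guid + " <-> " + back);
  }
}
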
http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/ConnectionController.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/ConnectionController.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/ConnectionController.java
index 8f143e7..b70abe2 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/ConnectionController.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/ConnectionController.java
@@ -18,32 +18,19 @@
 
 package org.apache.ambari.view.hive.resources.jobs;
 
-import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.hive.client.Connection;
-import org.apache.ambari.view.hive.client.ConnectionPool;
 import org.apache.ambari.view.hive.client.HiveClientException;
 import org.apache.ambari.view.hive.utils.ServiceFormattedException;
 import org.apache.hive.service.cli.thrift.TOperationHandle;
 
-import java.util.HashMap;
-import java.util.Map;
 
 public class ConnectionController {
-  private ViewContext context;
-  private Connection connection;
   private OperationHandleControllerFactory operationHandleControllerFactory;
+  private Connection connection;
 
-  private ConnectionController(ViewContext context) {
-    this.context = context;
-    connection = ConnectionPool.getConnection(context);
-    operationHandleControllerFactory = OperationHandleControllerFactory.getInstance(context);
-  }
-
-  private static Map<String, ConnectionController> viewSingletonObjects = new HashMap<String, ConnectionController>();
-  public static ConnectionController getInstance(ViewContext context) {
-    if (!viewSingletonObjects.containsKey(context.getInstanceName()))
-      viewSingletonObjects.put(context.getInstanceName(), new ConnectionController(context));
-    return viewSingletonObjects.get(context.getInstanceName());
+  public ConnectionController(OperationHandleControllerFactory operationHandleControllerFactory, Connection connection) {
+    this.connection = connection;
+    this.operationHandleControllerFactory = operationHandleControllerFactory;
   }
 
   public void selectDatabase(String database) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/IOperationHandleResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/IOperationHandleResourceManager.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/IOperationHandleResourceManager.java
new file mode 100644
index 0000000..185402e
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/IOperationHandleResourceManager.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs;
+
+import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive.resources.IResourceManager;
+import org.apache.ambari.view.hive.resources.jobs.viewJobs.Job;
+import org.apache.hive.service.cli.thrift.TOperationHandle;
+
+import java.util.List;
+
+public interface IOperationHandleResourceManager extends IResourceManager<StoredOperationHandle> {
+  List<StoredOperationHandle> readJobRelatedHandles(Job job);
+
+  void putHandleForJob(TOperationHandle h, Job job);
+
+  boolean containsHandleForJob(Job job);
+
+  TOperationHandle getHandleForJob(Job job) throws ItemNotFound;
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/Job.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/Job.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/Job.java
deleted file mode 100644
index e6f7a63..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/Job.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.resources.jobs;
-
-import org.apache.ambari.view.hive.persistence.utils.Indexed;
-import org.apache.ambari.view.hive.persistence.utils.PersonalResource;
-
-import java.io.Serializable;
-
-/**
- * Interface for Job bean to create Proxy for it
- */
-public interface Job extends Serializable,Indexed,PersonalResource {
-  public static final String JOB_STATE_UNKNOWN = "Unknown";
-  public static final String JOB_STATE_INITIALIZED = "Initialized";
-  public static final String JOB_STATE_RUNNING = "Running";
-  public static final String JOB_STATE_FINISHED = "Finished";
-  public static final String JOB_STATE_CANCELED = "Canceled";
-  public static final String JOB_STATE_CLOSED = "Closed";
-  public static final String JOB_STATE_ERROR = "Error";
-  public static final String JOB_STATE_PENDING = "Pending";
-
-  Integer getId();
-
-  void setId(Integer id);
-
-  String getOwner();
-
-  void setOwner(String owner);
-
-  String getTitle();
-
-  void setTitle(String title);
-
-  String getQueryFile();
-
-  void setQueryFile(String queryFile);
-
-  Long getDateSubmitted();
-
-  void setDateSubmitted(Long dateSubmitted);
-
-  Long getDuration();
-
-  void setDuration(Long duration);
-
-  String getStatus();
-
-  void setStatus(String status);
-
-  String getForcedContent();
-
-  void setForcedContent(String forcedContent);
-
-  Integer getQueryId();
-
-  void setQueryId(Integer queryId);
-
-  String getStatusDir();
-
-  void setStatusDir(String statusDir);
-
-  String getDataBase();
-
-  void setDataBase(String dataBase);
-
-  String getLogFile();
-
-  void setLogFile(String logFile);
-
-  String getConfFile();
-
-  void setConfFile(String confFile);
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobController.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobController.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobController.java
deleted file mode 100644
index 3d5189e..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobController.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.resources.jobs;
-
-import org.apache.ambari.view.hive.client.Cursor;
-import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
-
-public interface JobController {
-  void submit();
-
-  void cancel() throws ItemNotFound;
-
-  Job getJob();
-
-  /**
-   * Use carefully. Returns unproxied bean object
-   * @return unproxied bean object
-   */
-  Job getJobPOJO();
-
-  Cursor getResults() throws ItemNotFound;
-
-  void afterCreation();
-
-  void onRead();
-
-  boolean isModified();
-
-  void clearModified();
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobControllerFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobControllerFactory.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobControllerFactory.java
deleted file mode 100644
index f6ec5b1..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobControllerFactory.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.resources.jobs;
-
-import org.apache.ambari.view.ViewContext;
-
-import java.util.HashMap;
-import java.util.Map;
-
-public class JobControllerFactory {
-  private ViewContext context;
-
-  private JobControllerFactory(ViewContext context) {
-    this.context = context;
-  }
-
-  private static Map<String, JobControllerFactory> viewSingletonObjects = new HashMap<String, JobControllerFactory>();
-  public static JobControllerFactory getInstance(ViewContext context) {
-    if (!viewSingletonObjects.containsKey(context.getInstanceName()))
-      viewSingletonObjects.put(context.getInstanceName(), new JobControllerFactory(context));
-    return viewSingletonObjects.get(context.getInstanceName());
-  }
-
-  public JobController createControllerForJob(Job job) {
-    return new JobControllerImpl(context, job);
-  }
-}


[3/6] ambari git commit: AMBARI-10035. Hive View: Retrieve history from ATS (alexantonenko)

Posted by al...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobImpl.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobImpl.java
new file mode 100644
index 0000000..1858b3e
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobImpl.java
@@ -0,0 +1,226 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs.viewJobs;
+
+import org.apache.commons.beanutils.PropertyUtils;
+
+import java.lang.reflect.InvocationTargetException;
+import java.util.Map;
+
+/**
+ * Bean to represent a job
+ */
+public class JobImpl implements Job {
+  private String title = null;
+  private String queryFile = null;
+  private String statusDir = null;
+  private Long dateSubmitted = 0L;
+  private Long duration = 0L;
+  private String status = JOB_STATE_UNKNOWN;
+  private String forcedContent = null;
+  private String dataBase = null;
+  private String queryId = null;
+
+  private String applicationId;
+  private String dagName;
+
+  private String id = null;
+  private String owner = null;
+
+  private String logFile;
+  private String confFile;
+
+  public JobImpl() {}
+  public JobImpl(Map<String, Object> stringObjectMap) throws InvocationTargetException, IllegalAccessException {
+    for (Map.Entry<String, Object> entry : stringObjectMap.entrySet())  {
+      try {
+        PropertyUtils.setProperty(this, entry.getKey(), entry.getValue());
+      } catch (NoSuchMethodException e) {
+        //do nothing, skip
+      }
+    }
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (!(o instanceof JobImpl)) return false;
+
+    JobImpl job = (JobImpl) o;
+
+    if (id != null ? !id.equals(job.id) : job.id != null) return false;
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    return id != null ? id.hashCode() : 0;
+  }
+
+  @Override
+  public String getId() {
+    return id;
+  }
+
+  @Override
+  public void setId(String id) {
+    this.id = id;
+  }
+
+  @Override
+  public String getOwner() {
+    return owner;
+  }
+
+  @Override
+  public void setOwner(String owner) {
+    this.owner = owner;
+  }
+
+  @Override
+  public String getTitle() {
+    return title;
+  }
+
+  @Override
+  public void setTitle(String title) {
+    this.title = title;
+  }
+
+  @Override
+  public String getQueryFile() {
+    return queryFile;
+  }
+
+  @Override
+  public void setQueryFile(String queryFile) {
+    this.queryFile = queryFile;
+  }
+
+  @Override
+  public Long getDateSubmitted() {
+    return dateSubmitted;
+  }
+
+  @Override
+  public void setDateSubmitted(Long dateSubmitted) {
+    this.dateSubmitted = dateSubmitted;
+  }
+
+  @Override
+  public Long getDuration() {
+    return duration;
+  }
+
+  @Override
+  public void setDuration(Long duration) {
+    this.duration = duration;
+  }
+
+  @Override
+  public String getStatus() {
+    return status;
+  }
+
+  @Override
+  public void setStatus(String status) {
+    this.status = status;
+  }
+
+  @Override
+  public String getForcedContent() {
+    return forcedContent;
+  }
+
+  @Override
+  public void setForcedContent(String forcedContent) {
+    this.forcedContent = forcedContent;
+  }
+
+  @Override
+  public String getQueryId() {
+    return queryId;
+  }
+
+  @Override
+  public void setQueryId(String queryId) {
+    this.queryId = queryId;
+  }
+
+  @Override
+  public String getStatusDir() {
+    return statusDir;
+  }
+
+  @Override
+  public void setStatusDir(String statusDir) {
+    this.statusDir = statusDir;
+  }
+
+  @Override
+  public String getDataBase() {
+    return dataBase;
+  }
+
+  @Override
+  public void setDataBase(String dataBase) {
+    this.dataBase = dataBase;
+  }
+
+  @Override
+  public String getLogFile() {
+    return logFile;
+  }
+
+  @Override
+  public void setLogFile(String logFile) {
+    this.logFile = logFile;
+  }
+
+  @Override
+  public String getConfFile() {
+    return confFile;
+  }
+
+  @Override
+  public void setConfFile(String confFile) {
+    this.confFile = confFile;
+  }
+
+  @Override
+  public String getApplicationId() {
+    return applicationId;
+  }
+
+  @Override
+  public void setApplicationId(String applicationId) {
+    this.applicationId = applicationId;
+  }
+
+  @Override
+  public String getDagName() {
+    return dagName;
+  }
+
+  @Override
+  public void setDagName(String dagName) {
+    this.dagName = dagName;
+  }
+}

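For illustration only (not part of the patch): a minimal sketch of how the map-based constructor above can be driven. The property keys and values here are hypothetical; keys without a matching setter are silently skipped by the NoSuchMethodException handler.

import java.util.HashMap;
import java.util.Map;

import org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl;

public class JobImplSketch {
  public static void main(String[] args) throws Exception {
    // Keys must match the bean properties above ("title" -> setTitle(), etc.)
    Map<String, Object> props = new HashMap<String, Object>();
    props.put("title", "count rows");                       // hypothetical values
    props.put("dataBase", "default");
    props.put("forcedContent", "SELECT COUNT(*) FROM src");
    props.put("noSuchProperty", 42);                        // skipped: no matching setter

    JobImpl job = new JobImpl(props);
    System.out.println(job.getTitle() + " @ " + job.getDataBase());
  }
}
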
http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobResourceManager.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobResourceManager.java
new file mode 100644
index 0000000..101e328
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobResourceManager.java
@@ -0,0 +1,107 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs.viewJobs;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive.client.*;
+import org.apache.ambari.view.hive.persistence.utils.FilteringStrategy;
+import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive.resources.PersonalCRUDResourceManager;
+import org.apache.ambari.view.hive.utils.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.*;
+
+/**
+ * Object that provides CRUD operations for job objects
+ */
+public class JobResourceManager extends PersonalCRUDResourceManager<Job> {
+  private final static Logger LOG =
+      LoggerFactory.getLogger(JobResourceManager.class);
+
+  private IJobControllerFactory jobControllerFactory;
+
+  /**
+   * @param sharedObjectsFactory shared objects factory
+   * @param context View Context instance
+   */
+  public JobResourceManager(SharedObjectsFactory sharedObjectsFactory, ViewContext context) {
+    super(JobImpl.class, sharedObjectsFactory, context);
+    jobControllerFactory = sharedObjectsFactory.getJobControllerFactory();
+  }
+
+  @Override
+  public Job create(Job object) {
+    super.create(object);
+    JobController jobController = jobControllerFactory.createControllerForJob(object);
+
+    try {
+
+      jobController.afterCreation();
+      saveIfModified(jobController);
+
+    } catch (ServiceFormattedException e) {
+      cleanupAfterErrorAndThrowAgain(object, e);
+    }
+
+    return object;
+  }
+
+  public void saveIfModified(JobController jobController) {
+    if (jobController.isModified()) {
+      save(jobController.getJobPOJO());
+      jobController.clearModified();
+    }
+  }
+
+
+  @Override
+  public Job read(Object id) throws ItemNotFound {
+    Job job = super.read(id);
+    JobController jobController =  jobControllerFactory.createControllerForJob(job);
+    jobController.update();
+    saveIfModified(jobController);
+    return job;
+  }
+
+  @Override
+  public List<Job> readAll(FilteringStrategy filteringStrategy) {
+    return super.readAll(filteringStrategy);
+  }
+
+  @Override
+  public void delete(Object resourceId) throws ItemNotFound {
+    super.delete(resourceId);
+  }
+
+  public JobController readController(Object id) throws ItemNotFound {
+    Job job = read(id);
+    return jobControllerFactory.createControllerForJob(job);
+  }
+
+  public Cursor getJobResultsCursor(Job job) {
+    try {
+      JobController jobController = jobControllerFactory.createControllerForJob(job);
+      return jobController.getResults();
+    } catch (ItemNotFound itemNotFound) {
+      throw new NotFoundFormattedException("Job results are expired", null);
+    }
+  }
+}

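A hedged usage sketch for the manager above (not from the patch): reading a job refreshes it through its controller and persists any changes, after which a results cursor can be opened. The resourceManager and jobId arguments are assumed to be supplied by the surrounding view code.

import org.apache.ambari.view.hive.client.Cursor;
import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
import org.apache.ambari.view.hive.resources.jobs.viewJobs.Job;
import org.apache.ambari.view.hive.resources.jobs.viewJobs.JobResourceManager;

public class JobReadSketch {
  public static Cursor readAndFetchResults(JobResourceManager resourceManager, String jobId)
      throws ItemNotFound {
    // read() runs jobController.update() and saveIfModified() before returning the job
    Job job = resourceManager.read(jobId);
    return resourceManager.getJobResultsCursor(job);
  }
}
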
http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceItem.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceItem.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceItem.java
index ddd9990..c7ed078 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceItem.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceItem.java
@@ -32,7 +32,7 @@ public class FileResourceItem implements Serializable, PersonalResource {
   private String name;
   private String path;
 
-  private Integer id;
+  private String id;
   private String owner;
 
   public FileResourceItem() {}
@@ -41,12 +41,12 @@ public class FileResourceItem implements Serializable, PersonalResource {
   }
 
   @Override
-  public Integer getId() {
+  public String getId() {
     return id;
   }
 
   @Override
-  public void setId(Integer id) {
+  public void setId(String id) {
     this.id = id;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceResourceManager.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceResourceManager.java
index 31d9e23..822ae3c 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceResourceManager.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceResourceManager.java
@@ -19,6 +19,7 @@
 package org.apache.ambari.view.hive.resources.resources;
 
 import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive.persistence.IStorageFactory;
 import org.apache.ambari.view.hive.persistence.utils.FilteringStrategy;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive.resources.PersonalCRUDResourceManager;
@@ -38,8 +39,8 @@ public class FileResourceResourceManager extends PersonalCRUDResourceManager<Fil
    * Constructor
    * @param context View Context instance
    */
-  public FileResourceResourceManager(ViewContext context) {
-    super(FileResourceItem.class, context);
+  public FileResourceResourceManager(IStorageFactory storageFactory, ViewContext context) {
+    super(FileResourceItem.class, storageFactory, context);
   }
 
   @Override
@@ -48,12 +49,12 @@ public class FileResourceResourceManager extends PersonalCRUDResourceManager<Fil
   }
 
   @Override
-  public FileResourceItem read(Integer id) throws ItemNotFound {
+  public FileResourceItem read(Object id) throws ItemNotFound {
     return super.read(id);
   }
 
   @Override
-  public void delete(Integer resourceId) throws ItemNotFound {
+  public void delete(Object resourceId) throws ItemNotFound {
     super.delete(resourceId);
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceResourceProvider.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceResourceProvider.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceResourceProvider.java
index 5b9fc65..76b77dd 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceResourceProvider.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceResourceProvider.java
@@ -22,7 +22,7 @@ import com.google.inject.Inject;
 import org.apache.ambari.view.*;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive.persistence.utils.OnlyOwnersFilteringStrategy;
-import org.apache.ambari.view.hive.resources.PersonalCRUDResourceManager;
+import org.apache.ambari.view.hive.utils.SharedObjectsFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -44,7 +44,7 @@ public class FileResourceResourceProvider implements ResourceProvider<FileResour
 
   protected synchronized FileResourceResourceManager getResourceManager() {
     if (resourceManager == null) {
-      resourceManager = new FileResourceResourceManager(context);
+      resourceManager = new FileResourceResourceManager(new SharedObjectsFactory(context), context);
     }
     return resourceManager;
   }
@@ -52,7 +52,7 @@ public class FileResourceResourceProvider implements ResourceProvider<FileResour
   @Override
   public FileResourceItem getResource(String resourceId, Set<String> properties) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
     try {
-      return getResourceManager().read(Integer.valueOf(resourceId));
+      return getResourceManager().read(resourceId);
     } catch (ItemNotFound itemNotFound) {
       throw new NoSuchResourceException(resourceId);
     }
@@ -88,7 +88,7 @@ public class FileResourceResourceProvider implements ResourceProvider<FileResour
       throw new SystemException("error on updating resource", e);
     }
     try {
-      getResourceManager().update(item, Integer.valueOf(resourceId));
+      getResourceManager().update(item, resourceId);
     } catch (ItemNotFound itemNotFound) {
       throw new NoSuchResourceException(resourceId);
     }
@@ -98,7 +98,7 @@ public class FileResourceResourceProvider implements ResourceProvider<FileResour
   @Override
   public boolean deleteResource(String resourceId) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
     try {
-      getResourceManager().delete(Integer.valueOf(resourceId));
+      getResourceManager().delete(resourceId);
     } catch (ItemNotFound itemNotFound) {
       throw new NoSuchResourceException(resourceId);
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceService.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceService.java
index 2993280..222cf03 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceService.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceService.java
@@ -57,7 +57,7 @@ public class FileResourceService extends BaseService {
 
   protected synchronized FileResourceResourceManager getResourceManager() {
     if (resourceManager == null) {
-      resourceManager = new FileResourceResourceManager(context);
+      resourceManager = new FileResourceResourceManager(getSharedObjectsFactory(), context);
     }
     return resourceManager;
   }
@@ -70,10 +70,9 @@ public class FileResourceService extends BaseService {
   @Produces(MediaType.APPLICATION_JSON)
   public Response getOne(@PathParam("id") String id) {
     try {
-      FileResourceItem FileResourceItem = null;
-      FileResourceItem = getResourceManager().read(Integer.valueOf(id));
+      FileResourceItem fileResourceItem = getResourceManager().read(id);
       JSONObject object = new JSONObject();
-      object.put("fileResource", FileResourceItem);
+      object.put("fileResource", fileResourceItem);
       return Response.ok(object).build();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -91,7 +90,7 @@ public class FileResourceService extends BaseService {
   @Path("{id}")
   public Response delete(@PathParam("id") String id) {
     try {
-      getResourceManager().delete(Integer.valueOf(id));
+      getResourceManager().delete(id);
       return Response.status(204).build();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -132,7 +131,7 @@ public class FileResourceService extends BaseService {
   public Response update(ResourceRequest request,
                          @PathParam("id") String id) {
     try {
-      getResourceManager().update(request.fileResource, Integer.valueOf(id));
+      getResourceManager().update(request.fileResource, id);
       return Response.status(204).build();
     } catch (WebApplicationException ex) {
       throw ex;

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQuery.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQuery.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQuery.java
index fd75714..25a7748 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQuery.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQuery.java
@@ -34,7 +34,7 @@ public class SavedQuery implements Serializable, PersonalResource {
   private String title;
   private String shortQuery;
 
-  private Integer id;
+  private String id;
   private String owner;
 
   public SavedQuery() {}
@@ -43,12 +43,12 @@ public class SavedQuery implements Serializable, PersonalResource {
   }
 
   @Override
-  public Integer getId() {
+  public String getId() {
     return id;
   }
 
   @Override
-  public void setId(Integer id) {
+  public void setId(String id) {
     this.id = id;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceManager.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceManager.java
index a3c0f1b..c032bb1 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceManager.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceManager.java
@@ -19,6 +19,7 @@
 package org.apache.ambari.view.hive.resources.savedQueries;
 
 import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive.persistence.IStorageFactory;
 import org.apache.ambari.view.hive.persistence.utils.FilteringStrategy;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive.resources.PersonalCRUDResourceManager;
@@ -41,23 +42,15 @@ public class SavedQueryResourceManager extends PersonalCRUDResourceManager<Saved
   private final static Logger LOG =
       LoggerFactory.getLogger(SavedQueryResourceManager.class);
 
+  private SharedObjectsFactory sharedObjectsFactory;
+
   /**
    * Constructor
    * @param context View Context instance
    */
-  private SavedQueryResourceManager(ViewContext context) {
-    super(SavedQuery.class, context);
-  }
-
-  //TODO: move all context-singletones to ContextController or smth like that
-  private static Map<String, SavedQueryResourceManager> viewSingletonObjects = new HashMap<String, SavedQueryResourceManager>();
-  public static SavedQueryResourceManager getInstance(ViewContext context) {
-    if (!viewSingletonObjects.containsKey(context.getInstanceName()))
-      viewSingletonObjects.put(context.getInstanceName(), new SavedQueryResourceManager(context));
-    return viewSingletonObjects.get(context.getInstanceName());
-  }
-  static Map<String, SavedQueryResourceManager> getViewSingletonObjects() {
-    return viewSingletonObjects;
+  public SavedQueryResourceManager(ViewContext context, SharedObjectsFactory sharedObjectsFactory) {
+    super(SavedQuery.class, sharedObjectsFactory, context);
+    this.sharedObjectsFactory = sharedObjectsFactory;
   }
 
   @Override
@@ -83,20 +76,20 @@ public class SavedQueryResourceManager extends PersonalCRUDResourceManager<Saved
       throw new MisconfigurationFormattedException("scripts.dir");
     }
 
-    String normalizedName = String.format("hive-query-%d", object.getId());
+    String normalizedName = String.format("hive-query-%s", object.getId());
     String timestamp = new SimpleDateFormat("yyyy-MM-dd_hh-mm").format(new Date());
     String baseFileName = String.format(userScriptsPath +
         "/%s-%s", normalizedName, timestamp);
 
-    String newFilePath = HdfsUtil.findUnallocatedFileName(context, baseFileName, ".hql");
-    HdfsUtil.putStringToFile(context, newFilePath, "");
+    String newFilePath = HdfsUtil.findUnallocatedFileName(sharedObjectsFactory.getHdfsApi(), baseFileName, ".hql");
+    HdfsUtil.putStringToFile(sharedObjectsFactory.getHdfsApi(), newFilePath, "");
 
     object.setQueryFile(newFilePath);
-    getStorage().store(SavedQuery.class, object);
+    storageFabric.getStorage().store(SavedQuery.class, object);
   }
 
   @Override
-  public SavedQuery read(Integer id) throws ItemNotFound {
+  public SavedQuery read(Object id) throws ItemNotFound {
     SavedQuery savedQuery = super.read(id);
     fillShortQueryField(savedQuery);
     return savedQuery;
@@ -104,7 +97,7 @@ public class SavedQueryResourceManager extends PersonalCRUDResourceManager<Saved
 
   private void fillShortQueryField(SavedQuery savedQuery) {
     if (savedQuery.getQueryFile() != null) {
-      FilePaginator paginator = new FilePaginator(savedQuery.getQueryFile(), context);
+      FilePaginator paginator = new FilePaginator(savedQuery.getQueryFile(), sharedObjectsFactory.getHdfsApi());
       String query = null;
       try {
         query = paginator.readPage(0);
@@ -117,7 +110,14 @@ public class SavedQueryResourceManager extends PersonalCRUDResourceManager<Saved
       }
       savedQuery.setShortQuery(query.substring(0, (query.length() > 42)?42:query.length()));
     }
-    getStorage().store(SavedQuery.class, savedQuery);
+    storageFabric.getStorage().store(SavedQuery.class, savedQuery);
+  }
+
+  @Override
+  public SavedQuery update(SavedQuery newObject, String id) throws ItemNotFound {
+    SavedQuery savedQuery = super.update(newObject, id);
+    fillShortQueryField(savedQuery);
+    return savedQuery;
   }
 
   @Override
@@ -126,7 +126,7 @@ public class SavedQueryResourceManager extends PersonalCRUDResourceManager<Saved
   }
 
   @Override
-  public void delete(Integer resourceId) throws ItemNotFound {
+  public void delete(Object resourceId) throws ItemNotFound {
     super.delete(resourceId);
   }
 }

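For illustration (not part of this commit): a sketch of how a service could obtain the manager above through SharedObjectsFactory now that it is no longer a per-instance singleton. The context argument is assumed to be the ViewContext injected into the view.

import org.apache.ambari.view.ViewContext;
import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
import org.apache.ambari.view.hive.resources.savedQueries.SavedQuery;
import org.apache.ambari.view.hive.resources.savedQueries.SavedQueryResourceManager;
import org.apache.ambari.view.hive.utils.SharedObjectsFactory;

public class SavedQuerySketch {
  public static SavedQuery readOne(ViewContext context, String id) throws ItemNotFound {
    SavedQueryResourceManager manager =
        new SharedObjectsFactory(context).getSavedQueryResourceManager();
    return manager.read(id);  // id is a String key, not an Integer, after this change
  }
}
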
http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceProvider.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceProvider.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceProvider.java
index 20df9e6..a5561aa 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceProvider.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceProvider.java
@@ -22,7 +22,7 @@ import com.google.inject.Inject;
 import org.apache.ambari.view.*;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive.persistence.utils.OnlyOwnersFilteringStrategy;
-import org.apache.ambari.view.hive.resources.PersonalCRUDResourceManager;
+import org.apache.ambari.view.hive.utils.SharedObjectsFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -38,18 +38,24 @@ public class SavedQueryResourceProvider implements ResourceProvider<SavedQuery>
   @Inject
   ViewContext context;
 
-  protected SavedQueryResourceManager resourceManager = null;
   protected final static Logger LOG =
       LoggerFactory.getLogger(SavedQueryResourceProvider.class);
+  private SharedObjectsFactory sharedObjectsFactory;
+
+  public SharedObjectsFactory getSharedObjectsFactory() {
+    if (sharedObjectsFactory == null)
+      sharedObjectsFactory = new SharedObjectsFactory(context);
+    return sharedObjectsFactory;
+  }
 
   protected synchronized SavedQueryResourceManager getResourceManager() {
-    return SavedQueryResourceManager.getInstance(context);
+    return getSharedObjectsFactory().getSavedQueryResourceManager();
   }
 
   @Override
   public SavedQuery getResource(String resourceId, Set<String> properties) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
     try {
-      return getResourceManager().read(Integer.valueOf(resourceId));
+      return getResourceManager().read(resourceId);
     } catch (ItemNotFound itemNotFound) {
       throw new NoSuchResourceException(resourceId);
     }
@@ -85,7 +91,7 @@ public class SavedQueryResourceProvider implements ResourceProvider<SavedQuery>
       throw new SystemException("error on updating resource", e);
     }
     try {
-      getResourceManager().update(item, Integer.valueOf(resourceId));
+      getResourceManager().update(item, resourceId);
     } catch (ItemNotFound itemNotFound) {
       throw new NoSuchResourceException(resourceId);
     }
@@ -95,7 +101,7 @@ public class SavedQueryResourceProvider implements ResourceProvider<SavedQuery>
   @Override
   public boolean deleteResource(String resourceId) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
     try {
-      getResourceManager().delete(Integer.valueOf(resourceId));
+      getResourceManager().delete(resourceId);
     } catch (ItemNotFound itemNotFound) {
       throw new NoSuchResourceException(resourceId);
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryService.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryService.java
index f6d5f41..338457a 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryService.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryService.java
@@ -23,7 +23,6 @@ import org.apache.ambari.view.ViewResourceHandler;
 import org.apache.ambari.view.hive.BaseService;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive.persistence.utils.OnlyOwnersFilteringStrategy;
-import org.apache.ambari.view.hive.resources.PersonalCRUDResourceManager;
 import org.apache.ambari.view.hive.utils.NotFoundFormattedException;
 import org.apache.ambari.view.hive.utils.ServiceFormattedException;
 import org.json.simple.JSONObject;
@@ -58,7 +57,7 @@ public class SavedQueryService extends BaseService {
       LoggerFactory.getLogger(SavedQueryService.class);
 
   protected synchronized SavedQueryResourceManager getResourceManager() {
-    return SavedQueryResourceManager.getInstance(context);
+    return getSharedObjectsFactory().getSavedQueryResourceManager();
   }
 
   protected void setResourceManager(SavedQueryResourceManager resourceManager) {
@@ -73,8 +72,7 @@ public class SavedQueryService extends BaseService {
   @Produces(MediaType.APPLICATION_JSON)
   public Response getOne(@PathParam("queryId") String queryId) {
     try {
-      SavedQuery savedQuery = null;
-      savedQuery = getResourceManager().read(Integer.valueOf(queryId));
+      SavedQuery savedQuery = getResourceManager().read(queryId);
       JSONObject object = new JSONObject();
       object.put("savedQuery", savedQuery);
       return Response.ok(object).build();
@@ -94,7 +92,7 @@ public class SavedQueryService extends BaseService {
   @Path("{queryId}")
   public Response delete(@PathParam("queryId") String queryId) {
     try {
-      getResourceManager().delete(Integer.valueOf(queryId));
+      getResourceManager().delete(queryId);
       return Response.status(204).build();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -135,7 +133,7 @@ public class SavedQueryService extends BaseService {
   public Response update(SavedQueryRequest request,
                          @PathParam("queryId") String queryId) {
     try {
-      getResourceManager().update(request.savedQuery, Integer.valueOf(queryId));
+      getResourceManager().update(request.savedQuery, queryId);
       return Response.status(204).build();
     } catch (WebApplicationException ex) {
       throw ex;

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDF.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDF.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDF.java
index 4a58e38..2dafcf4 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDF.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDF.java
@@ -33,7 +33,7 @@ public class UDF implements Serializable, PersonalResource {
   private String classname;
   private Integer fileResource;
 
-  private Integer id;
+  private String id;
   private String owner;
 
   public UDF() {}
@@ -42,12 +42,12 @@ public class UDF implements Serializable, PersonalResource {
   }
 
   @Override
-  public Integer getId() {
+  public String getId() {
     return id;
   }
 
   @Override
-  public void setId(Integer id) {
+  public void setId(String id) {
     this.id = id;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDFResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDFResourceManager.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDFResourceManager.java
index cb4264f..98a21b3 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDFResourceManager.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDFResourceManager.java
@@ -19,6 +19,7 @@
 package org.apache.ambari.view.hive.resources.udfs;
 
 import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive.persistence.IStorageFactory;
 import org.apache.ambari.view.hive.persistence.utils.FilteringStrategy;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive.resources.PersonalCRUDResourceManager;
@@ -38,12 +39,12 @@ public class UDFResourceManager extends PersonalCRUDResourceManager<UDF> {
    * Constructor
    * @param context View Context instance
    */
-  public UDFResourceManager(ViewContext context) {
-    super(UDF.class, context);
+  public UDFResourceManager(IStorageFactory storageFactory, ViewContext context) {
+    super(UDF.class, storageFactory, context);
   }
 
   @Override
-  public UDF read(Integer id) throws ItemNotFound {
+  public UDF read(Object id) throws ItemNotFound {
     return super.read(id);
   }
 
@@ -58,7 +59,7 @@ public class UDFResourceManager extends PersonalCRUDResourceManager<UDF> {
   }
 
   @Override
-  public void delete(Integer resourceId) throws ItemNotFound {
+  public void delete(Object resourceId) throws ItemNotFound {
     super.delete(resourceId);
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDFResourceProvider.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDFResourceProvider.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDFResourceProvider.java
index 70994eb..4117678 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDFResourceProvider.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDFResourceProvider.java
@@ -22,7 +22,7 @@ import com.google.inject.Inject;
 import org.apache.ambari.view.*;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive.persistence.utils.OnlyOwnersFilteringStrategy;
-import org.apache.ambari.view.hive.resources.PersonalCRUDResourceManager;
+import org.apache.ambari.view.hive.utils.SharedObjectsFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -42,9 +42,10 @@ public class UDFResourceProvider implements ResourceProvider<UDF> {
   protected final static Logger LOG =
       LoggerFactory.getLogger(UDFResourceProvider.class);
 
+
   protected synchronized UDFResourceManager getResourceManager() {
     if (resourceManager == null) {
-      resourceManager = new UDFResourceManager(context);
+      resourceManager = new UDFResourceManager(new SharedObjectsFactory(context), context);
     }
     return resourceManager;
   }
@@ -52,7 +53,7 @@ public class UDFResourceProvider implements ResourceProvider<UDF> {
   @Override
   public UDF getResource(String resourceId, Set<String> properties) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
     try {
-      return getResourceManager().read(Integer.valueOf(resourceId));
+      return getResourceManager().read(resourceId);
     } catch (ItemNotFound itemNotFound) {
       throw new NoSuchResourceException(resourceId);
     }
@@ -88,7 +89,7 @@ public class UDFResourceProvider implements ResourceProvider<UDF> {
       throw new SystemException("error on updating resource", e);
     }
     try {
-      getResourceManager().update(item, Integer.valueOf(resourceId));
+      getResourceManager().update(item, resourceId);
     } catch (ItemNotFound itemNotFound) {
       throw new NoSuchResourceException(resourceId);
     }
@@ -98,7 +99,7 @@ public class UDFResourceProvider implements ResourceProvider<UDF> {
   @Override
   public boolean deleteResource(String resourceId) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
     try {
-      getResourceManager().delete(Integer.valueOf(resourceId));
+      getResourceManager().delete(resourceId);
     } catch (ItemNotFound itemNotFound) {
       throw new NoSuchResourceException(resourceId);
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDFService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDFService.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDFService.java
index 864d5b0..aa170c9 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDFService.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDFService.java
@@ -23,8 +23,6 @@ import org.apache.ambari.view.ViewResourceHandler;
 import org.apache.ambari.view.hive.BaseService;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive.persistence.utils.OnlyOwnersFilteringStrategy;
-import org.apache.ambari.view.hive.resources.PersonalCRUDResourceManager;
-import org.apache.ambari.view.hive.resources.resources.FileResourceItem;
 import org.apache.ambari.view.hive.resources.resources.FileResourceResourceManager;
 import org.apache.ambari.view.hive.utils.NotFoundFormattedException;
 import org.apache.ambari.view.hive.utils.ServiceFormattedException;
@@ -61,14 +59,14 @@ public class UDFService extends BaseService {
 
   protected synchronized UDFResourceManager getResourceManager() {
     if (resourceManager == null) {
-      resourceManager = new UDFResourceManager(context);
+      resourceManager = new UDFResourceManager(getSharedObjectsFactory(), context);
     }
     return resourceManager;
   }
 
   protected synchronized FileResourceResourceManager getFileResourceResourceManager() {
     if (fileResourceResourceManager == null) {
-      fileResourceResourceManager = new FileResourceResourceManager(context);
+      fileResourceResourceManager = new FileResourceResourceManager(getSharedObjectsFactory(), context);
     }
     return fileResourceResourceManager;
   }
@@ -81,10 +79,9 @@ public class UDFService extends BaseService {
   @Produces(MediaType.APPLICATION_JSON)
   public Response getOne(@PathParam("id") String id) {
     try {
-      UDF UDF = null;
-      UDF = getResourceManager().read(Integer.valueOf(id));
+      UDF udf = getResourceManager().read(id);
       JSONObject object = new JSONObject();
-      object.put("udf", UDF);
+      object.put("udf", udf);
       return Response.ok(object).build();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -102,7 +99,7 @@ public class UDFService extends BaseService {
   @Path("{id}")
   public Response delete(@PathParam("id") String id) {
     try {
-      getResourceManager().delete(Integer.valueOf(id));
+      getResourceManager().delete(id);
       return Response.status(204).build();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -145,7 +142,7 @@ public class UDFService extends BaseService {
     try {
       if (request.udf.getFileResource() != null)
         getFileResourceResourceManager().read(request.udf.getFileResource());
-      getResourceManager().update(request.udf, Integer.valueOf(id));
+      getResourceManager().update(request.udf, id);
       return Response.status(204).build();
     } catch (WebApplicationException ex) {
       throw ex;

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/FilePaginator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/FilePaginator.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/FilePaginator.java
index f27e1f9..6282fc9 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/FilePaginator.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/FilePaginator.java
@@ -40,11 +40,11 @@ public class FilePaginator {
   /**
    * Constructor
    * @param filePath Path to file on HDFS
-   * @param context View Context instance
+   * @param hdfsApi hdfs api
    */
-  public FilePaginator(String filePath, ViewContext context) {
+  public FilePaginator(String filePath, HdfsApi hdfsApi) {
     this.filePath = filePath;
-    hdfsApi = HdfsApi.getInstance(context);
+    this.hdfsApi = hdfsApi;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/HdfsApi.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/HdfsApi.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/HdfsApi.java
index 9a120fa..e5e3593 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/HdfsApi.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/HdfsApi.java
@@ -102,7 +102,7 @@ public class HdfsApi {
    * @throws java.io.IOException
    * @throws InterruptedException
    */
-  public FileStatus[] listdir(final String path) throws FileNotFoundException,
+  public synchronized FileStatus[] listdir(final String path) throws FileNotFoundException,
       IOException, InterruptedException {
     return ugi.doAs(new PrivilegedExceptionAction<FileStatus[]>() {
       public FileStatus[] run() throws FileNotFoundException, Exception {
@@ -119,7 +119,7 @@ public class HdfsApi {
    * @throws java.io.FileNotFoundException
    * @throws InterruptedException
    */
-  public FileStatus getFileStatus(final String path) throws IOException,
+  public synchronized FileStatus getFileStatus(final String path) throws IOException,
       FileNotFoundException, InterruptedException {
     return ugi.doAs(new PrivilegedExceptionAction<FileStatus>() {
       public FileStatus run() throws FileNotFoundException, IOException {
@@ -135,7 +135,7 @@ public class HdfsApi {
    * @throws java.io.IOException
    * @throws InterruptedException
    */
-  public boolean mkdir(final String path) throws IOException,
+  public synchronized boolean mkdir(final String path) throws IOException,
       InterruptedException {
     return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
       public Boolean run() throws Exception {
@@ -152,7 +152,7 @@ public class HdfsApi {
    * @throws java.io.IOException
    * @throws InterruptedException
    */
-  public boolean rename(final String src, final String dst) throws IOException,
+  public synchronized boolean rename(final String src, final String dst) throws IOException,
       InterruptedException {
     return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
       public Boolean run() throws Exception {
@@ -169,7 +169,7 @@ public class HdfsApi {
    * @throws java.io.IOException
    * @throws InterruptedException
    */
-  public boolean delete(final String path, final boolean recursive)
+  public synchronized boolean delete(final String path, final boolean recursive)
       throws IOException, InterruptedException {
     return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
       public Boolean run() throws Exception {
@@ -183,7 +183,7 @@ public class HdfsApi {
    * @return home directory
    * @throws Exception
    */
-  public Path getHomeDir() throws Exception {
+  public synchronized Path getHomeDir() throws Exception {
     return ugi.doAs(new PrivilegedExceptionAction<Path>() {
       public Path run() throws IOException {
         return fs.getHomeDirectory();
@@ -196,7 +196,7 @@ public class HdfsApi {
   * @return filesystem status
    * @throws Exception
    */
-  public FsStatus getStatus() throws Exception {
+  public synchronized FsStatus getStatus() throws Exception {
     return ugi.doAs(new PrivilegedExceptionAction<FsStatus>() {
       public FsStatus run() throws IOException {
         return fs.getStatus();
@@ -212,7 +212,7 @@ public class HdfsApi {
    * @throws java.io.IOException
    * @throws InterruptedException
    */
-  public FSDataOutputStream create(final String path, final boolean overwrite)
+  public synchronized FSDataOutputStream create(final String path, final boolean overwrite)
       throws IOException, InterruptedException {
     return ugi.doAs(new PrivilegedExceptionAction<FSDataOutputStream>() {
       public FSDataOutputStream run() throws Exception {
@@ -228,7 +228,7 @@ public class HdfsApi {
    * @throws java.io.IOException
    * @throws InterruptedException
    */
-  public FSDataInputStream open(final String path) throws IOException,
+  public synchronized FSDataInputStream open(final String path) throws IOException,
       InterruptedException {
     return ugi.doAs(new PrivilegedExceptionAction<FSDataInputStream>() {
       public FSDataInputStream run() throws Exception {
@@ -245,7 +245,7 @@ public class HdfsApi {
    * @throws java.io.IOException
    * @throws InterruptedException
    */
-  public void copy(final String src, final String dest) throws IOException,
+  public synchronized void copy(final String src, final String dest) throws IOException,
       InterruptedException {
     boolean result = ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
       public Boolean run() throws Exception {
@@ -257,7 +257,7 @@ public class HdfsApi {
     }
   }
 
-  public boolean exists(final String newFilePath) throws IOException, InterruptedException {
+  public synchronized boolean exists(final String newFilePath) throws IOException, InterruptedException {
     return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
       public Boolean run() throws Exception {
         return fs.exists(new Path(newFilePath));
@@ -326,24 +326,7 @@ public class HdfsApi {
     return json;
   }
 
-
-  private static Map<String, HdfsApi> viewSingletonObjects = new HashMap<String, HdfsApi>();
-  /**
-   * Returns HdfsApi object specific to instance
-   * @param context View Context instance
-   * @return Hdfs business delegate object
-   */
-  public static HdfsApi getInstance(ViewContext context) {
-    if (!viewSingletonObjects.containsKey(context.getInstanceName()))
-      viewSingletonObjects.put(context.getInstanceName(), connectToHDFSApi(context));
-    return viewSingletonObjects.get(context.getInstanceName());
-  }
-
-  public static void setInstance(ViewContext context, HdfsApi api) {
-    viewSingletonObjects.put(context.getInstanceName(), api);
-  }
-
-  public static HdfsApi connectToHDFSApi(ViewContext context) {
+  public static synchronized HdfsApi connectToHDFSApi(ViewContext context) {
     HdfsApi api = null;
     Thread.currentThread().setContextClassLoader(null);
 
@@ -392,8 +375,4 @@ public class HdfsApi {
       userName = context.getUsername();
     return userName;
   }
-
-  public static void dropAllConnections() {
-    viewSingletonObjects.clear();
-  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/HdfsUtil.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/HdfsUtil.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/HdfsUtil.java
index c1c5495..3120958 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/HdfsUtil.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/HdfsUtil.java
@@ -19,7 +19,6 @@
 package org.apache.ambari.view.hive.utils;
 
 
-import org.apache.ambari.view.ViewContext;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -35,14 +34,14 @@ public class HdfsUtil {
    * @param filePath path to file
    * @param content new content of file
    */
-  public static void putStringToFile(ViewContext context, String filePath, String content) {
-    HdfsApi hdfs = HdfsApi.getInstance(context);
-
+  public static void putStringToFile(HdfsApi hdfs, String filePath, String content) {
     FSDataOutputStream stream;
     try {
-      stream = hdfs.create(filePath, true);
-      stream.writeBytes(content);
-      stream.close();
+      synchronized (hdfs) {
+        stream = hdfs.create(filePath, true);
+        stream.writeBytes(content);
+        stream.close();
+      }
     } catch (IOException e) {
       throw new ServiceFormattedException("Could not write file " + filePath, e);
     } catch (InterruptedException e) {
@@ -57,9 +56,7 @@ public class HdfsUtil {
    * @param extension file extension
    * @return if fullPathAndFilename="/tmp/file",extension=".txt" then filename will be like "/tmp/file_42.txt"
    */
-  public static String findUnallocatedFileName(ViewContext context, String fullPathAndFilename, String extension) {
-    HdfsApi hdfs = HdfsApi.getInstance(context);
-
+  public static String findUnallocatedFileName(HdfsApi hdfs, String fullPathAndFilename, String extension) {
     int triesCount = 0;
     String newFilePath;
     boolean isUnallocatedFilenameFound;

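A small sketch (not from the patch) of the HdfsApi-based signatures above; the hdfsApi instance and scripts directory are assumptions, e.g. obtained from SharedObjectsFactory.getHdfsApi() and the view configuration.

import org.apache.ambari.view.hive.utils.HdfsApi;
import org.apache.ambari.view.hive.utils.HdfsUtil;

public class HdfsUtilSketch {
  public static String writeNewScript(HdfsApi hdfsApi, String scriptsDir, String content) {
    // e.g. base ".../hive-query" with ".hql" resolves to something like ".../hive-query_42.hql"
    String newFilePath = HdfsUtil.findUnallocatedFileName(hdfsApi, scriptsDir + "/hive-query", ".hql");
    HdfsUtil.putStringToFile(hdfsApi, newFilePath, content);
    return newFilePath;
  }
}
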
http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/SharedObjectsFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/SharedObjectsFactory.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/SharedObjectsFactory.java
new file mode 100644
index 0000000..2c7e242
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/SharedObjectsFactory.java
@@ -0,0 +1,163 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.utils;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive.client.Connection;
+import org.apache.ambari.view.hive.client.ConnectionFactory;
+import org.apache.ambari.view.hive.client.IConnectionFactory;
+import org.apache.ambari.view.hive.persistence.IStorageFactory;
+import org.apache.ambari.view.hive.persistence.Storage;
+import org.apache.ambari.view.hive.persistence.utils.StorageFactory;
+import org.apache.ambari.view.hive.resources.jobs.ConnectionController;
+import org.apache.ambari.view.hive.resources.jobs.OperationHandleControllerFactory;
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.ATSParser;
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.ATSParserFactory;
+import org.apache.ambari.view.hive.resources.jobs.viewJobs.IJobControllerFactory;
+import org.apache.ambari.view.hive.resources.jobs.viewJobs.JobControllerFactory;
+import org.apache.ambari.view.hive.resources.savedQueries.SavedQueryResourceManager;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Generates shared connections. Clients with the same tag get the same connection.
+ * For example, user 'admin' using view instance 'HIVE1' will use one connection;
+ * another user will use a different one.
+ */
+public class SharedObjectsFactory implements IStorageFactory, IConnectionFactory {
+  private ViewContext context;
+  private IConnectionFactory hiveConnectionFactory;
+  private IStorageFactory storageFactory;
+  private ATSParserFactory atsParserFactory;
+
+  private static final Map<Class, Map<String, Object>> localObjects = new HashMap<Class, Map<String, Object>>();
+
+  public SharedObjectsFactory(ViewContext context) {
+    this.context = context;
+    this.hiveConnectionFactory = new ConnectionFactory(context);
+    this.storageFactory = new StorageFactory(context);
+    this.atsParserFactory = new ATSParserFactory(context);
+
+    synchronized (localObjects) {
+      if (localObjects.size() == 0) {
+        localObjects.put(Connection.class, new HashMap<String, Object>());
+        localObjects.put(OperationHandleControllerFactory.class, new HashMap<String, Object>());
+        localObjects.put(Storage.class, new HashMap<String, Object>());
+        localObjects.put(IJobControllerFactory.class, new HashMap<String, Object>());
+        localObjects.put(ATSParser.class, new HashMap<String, Object>());
+        localObjects.put(SavedQueryResourceManager.class, new HashMap<String, Object>());
+        localObjects.put(HdfsApi.class, new HashMap<String, Object>());
+      }
+    }
+  }
+
+  /**
+   * Returns Connection object specific to unique tag
+   * @return Hive connection object shared for this tag
+   */
+  @Override
+  public Connection getHiveConnection() {
+    if (!localObjects.get(Connection.class).containsKey(getTagName())) {
+      Connection newConnection = hiveConnectionFactory.getHiveConnection();
+      localObjects.get(Connection.class).put(getTagName(), newConnection);
+    }
+    return (Connection) localObjects.get(Connection.class).get(getTagName());
+  }
+
+  public ConnectionController getHiveConnectionController() {
+    return new ConnectionController(getOperationHandleControllerFactory(), getHiveConnection());
+  }
+
+  // =============================
+
+  public OperationHandleControllerFactory getOperationHandleControllerFactory() {
+    if (!localObjects.get(OperationHandleControllerFactory.class).containsKey(getTagName()))
+      localObjects.get(OperationHandleControllerFactory.class).put(getTagName(), new OperationHandleControllerFactory(this));
+    return (OperationHandleControllerFactory) localObjects.get(OperationHandleControllerFactory.class).get(getTagName());
+  }
+
+  // =============================
+  @Override
+  public Storage getStorage() {
+    if (!localObjects.get(Storage.class).containsKey(getTagName()))
+      localObjects.get(Storage.class).put(getTagName(), storageFactory.getStorage());
+    return (Storage) localObjects.get(Storage.class).get(getTagName());
+  }
+
+  // =============================
+  public IJobControllerFactory getJobControllerFactory() {
+    if (!localObjects.get(IJobControllerFactory.class).containsKey(getTagName()))
+      localObjects.get(IJobControllerFactory.class).put(getTagName(), new JobControllerFactory(context, this));
+    return (IJobControllerFactory) localObjects.get(IJobControllerFactory.class).get(getTagName());
+  }
+
+  // =============================
+
+  public SavedQueryResourceManager getSavedQueryResourceManager() {
+    if (!localObjects.get(SavedQueryResourceManager.class).containsKey(getTagName()))
+      localObjects.get(SavedQueryResourceManager.class).put(getTagName(), new SavedQueryResourceManager(context, this));
+    return (SavedQueryResourceManager) localObjects.get(SavedQueryResourceManager.class).get(getTagName());
+  }
+
+  // =============================
+  public ATSParser getATSParser() {
+    if (!localObjects.get(ATSParser.class).containsKey(getTagName()))
+      localObjects.get(ATSParser.class).put(getTagName(), atsParserFactory.getATSParser());
+    return (ATSParser) localObjects.get(ATSParser.class).get(getTagName());
+  }
+
+  // =============================
+  public HdfsApi getHdfsApi() {
+    if (!localObjects.get(HdfsApi.class).containsKey(getTagName()))
+      localObjects.get(HdfsApi.class).put(getTagName(), HdfsApi.connectToHDFSApi(context));
+    return (HdfsApi) localObjects.get(HdfsApi.class).get(getTagName());
+  }
+
+  /**
+   * Generates the tag name. Clients with the same tag will share one connection.
+   * @return tag name
+   */
+  public String getTagName() {
+    return String.format("%s:%s", context.getInstanceName(), context.getUsername());
+  }
+
+  /**
+   * For testing purposes: substitutes the local object of the given class for the current tag
+   */
+  public void setInstance(Class clazz, Object object) {
+    localObjects.get(clazz).put(getTagName(), object);
+  }
+
+  /**
+   * For testing purposes: clears all local objects of a particular class
+   */
+  public void clear(Class clazz) {
+    localObjects.get(clazz).clear();
+  }
+
+  /**
+   * For testing purposes: clears all cached local objects
+   */
+  public void clear() {
+    for(Map<String, Object> map : localObjects.values()) {
+      map.clear();
+    }
+  }
+}
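
A rough usage sketch of the tag-based sharing above (the ViewContext instance is assumed to be supplied by the Ambari views framework, and the wrapper method below is hypothetical):

    // Hypothetical caller, for illustration; 'context' is injected by the views framework.
    public Connection connectionForCurrentUser(ViewContext context) {
      SharedObjectsFactory factory = new SharedObjectsFactory(context);
      // The tag is "<instanceName>:<username>", e.g. "HIVE1:admin"; every caller with
      // the same tag receives the same cached Connection from the localObjects map.
      return factory.getHiveConnection();
    }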

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/components/typeahead-widget.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/components/typeahead-widget.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/components/typeahead-widget.js
index 8226c9c..34c1f4b 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/components/typeahead-widget.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/components/typeahead-widget.js
@@ -19,7 +19,7 @@
 import Typeahead from 'ember-cli-selectize/components/ember-selectize';
 import Ember from 'ember';
 
-export default Typeahead.extend({
+export default Typeahead.extend(Ember.I18n.TranslateableProperties, {
   didInsertElement: function() {
     this._super();
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/history.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/history.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/history.js
index 157b917..a5de342 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/history.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/history.js
@@ -23,8 +23,8 @@ import constants from 'hive/utils/constants';
 export default Ember.ArrayController.extend(FilterableMixin, {
   itemController: constants.namingConventions.job,
 
-  sortAscending: true,
-  sortProperties: ['dateSubmitted'],
+  sortAscending: false,
+  sortProperties: ['dateSubmittedTimestamp'],
 
   init: function () {
     var oneMonthAgo = new Date();
@@ -40,12 +40,11 @@ export default Ember.ArrayController.extend(FilterableMixin, {
       }),
       Ember.Object.create({
         caption: 'columns.status',
-        property: 'status',
-        classBinding: 'status'
+        property: 'status'
       }),
       Ember.Object.create({
         caption: 'columns.date',
-        property: 'dateSubmitted',
+        property: 'dateSubmittedTimestamp',
         dateRange: Ember.Object.create({
           min: oneMonthAgo,
           max: new Date()

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index.js
index 6f93b11..592bb06 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index.js
@@ -18,6 +18,7 @@
 
 import Ember from 'ember';
 import constants from 'hive/utils/constants';
+import utils from 'hive/utils/functions';
 
 export default Ember.Controller.extend({
   needs: [ constants.namingConventions.openQueries,
@@ -26,7 +27,9 @@ export default Ember.Controller.extend({
            constants.namingConventions.jobLogs,
            constants.namingConventions.jobResults,
            constants.namingConventions.jobExplain,
-           constants.namingConventions.settings
+           constants.namingConventions.settings,
+           constants.namingConventions.visualExplain,
+           constants.namingConventions.tezUI
   ],
 
   openQueries: Ember.computed.alias('controllers.' + constants.namingConventions.openQueries),
@@ -36,6 +39,8 @@ export default Ember.Controller.extend({
   results: Ember.computed.alias('controllers.' + constants.namingConventions.jobResults),
   explain: Ember.computed.alias('controllers.' + constants.namingConventions.jobExplain),
   settings: Ember.computed.alias('controllers.' + constants.namingConventions.settings),
+  visualExplain: Ember.computed.alias('controllers.' + constants.namingConventions.visualExplain),
+  tezUI: Ember.computed.alias('controllers.' + constants.namingConventions.tezUI),
 
   canExecute: function () {
     var isModelRunning = this.get('model.isRunning');
@@ -77,7 +82,6 @@ export default Ember.Controller.extend({
 
   _executeQuery: function (shouldExplain) {
     var queryId,
-        self = this,
         query,
         finalQuery,
         job,
@@ -166,8 +170,6 @@ export default Ember.Controller.extend({
     }
 
     queries = queries.map(function (query) {
-      var explainIndex = query.indexOf(constants.namingConventions.explainPrefix);
-
       if (shouldExplain) {
         if (query.indexOf(constants.namingConventions.explainPrefix) === -1) {
           return constants.namingConventions.explainPrefix + query;
@@ -213,7 +215,7 @@ export default Ember.Controller.extend({
     this._super();
 
     // initialize queryParams with an empty array
-    this.set('queryParams', Ember.ArrayProxy.create({ content: Ember.A([]) }))
+    this.set('queryParams', Ember.ArrayProxy.create({ content: Ember.A([]) }));
 
     this.set('queryProcessTabs', Ember.ArrayProxy.create({ content: Ember.A([
       Ember.Object.create({
@@ -232,20 +234,27 @@ export default Ember.Controller.extend({
   },
 
   displayJobTabs: function () {
-    return this.get('content.constructor.typeKey') === constants.namingConventions.job;
+    return this.get('content.constructor.typeKey') === constants.namingConventions.job &&
+           utils.isInteger(this.get('content.id'));
   }.property('content'),
 
   modelChanged: function () {
     var self = this;
     var content = this.get('content');
     var openQueries = this.get('openQueries');
+    var database = this.get('databases').findBy('name', this.get('content.dataBase'));
+
+    if (database) {
+      this.set('databases.selectedDatabase', database);
+    }
 
     //update open queries list when current query model changes
     openQueries.update(content).then(function (isExplainedQuery) {
       var newId = content.get('id');
       var tab = openQueries.getTabForModel(content);
 
-      if (content.get('constructor.typeKey') === constants.namingConventions.job) {
+      //if not an ATS job
+      if (content.get('constructor.typeKey') === constants.namingConventions.job && utils.isInteger(newId)) {
         self.get('queryProcessTabs').forEach(function (queryTab) {
           queryTab.set('id', newId);
         });
@@ -269,7 +278,7 @@ export default Ember.Controller.extend({
       return;
     }
 
-    if (this.get('content.status') !== constants.statuses.finished) {
+    if (!utils.insensitiveCompare(this.get('content.status'), constants.statuses.succeeded)) {
       return;
     }
 
@@ -285,7 +294,7 @@ export default Ember.Controller.extend({
     var tabs = this.get('queryProcessTabs');
     var isResultsTabVisible = tabs.findBy('path', constants.namingConventions.subroutes.jobResults).get('visible');
 
-    if (this.get('content.status') === constants.statuses.finished && isResultsTabVisible) {
+    if (utils.insensitiveCompare(this.get('content.status'), constants.statuses.succeeded) && isResultsTabVisible) {
       items.push({
         title: Ember.I18n.t('buttons.saveHdfs'),
         action: 'saveToHDFS'
@@ -320,7 +329,7 @@ export default Ember.Controller.extend({
   saveToHDFS: function () {
     var job = this.get('content');
 
-    if (job.get('status') !== constants.statuses.finished) {
+    if (!utils.insensitiveCompare(job.get('status'), constants.statuses.succeeded)) {
       return;
     }
 
@@ -347,7 +356,7 @@ export default Ember.Controller.extend({
 
     Ember.run.later(function () {
       Ember.$.getJSON(url).then(function (response) {
-        if (response.status !== constants.results.statuses.terminated) {
+        if (!utils.insensitiveCompare(response.status, constants.results.statuses.terminated)) {
           self.pollSaveToHDFS(response);
         } else {
           self.set('content.isRunning', false);
@@ -413,6 +422,10 @@ export default Ember.Controller.extend({
           id: 'fixture_' + idCounter
         });
 
+        if (idCounter) {
+          model.set('title', model.get('title') + ' (' + idCounter + ')')
+        }
+
         idCounter++;
 
         this.transitionToRoute(constants.namingConventions.subroutes.savedQuery, model);
@@ -424,6 +437,8 @@ export default Ember.Controller.extend({
           wasNew = this.get('model.isNew'),
           defer = Ember.RSVP.defer();
 
+      this.set('model.dataBase', this.get('databases.selectedDatabase.name'));
+
       this.send('openModal', 'modal-save', {
         heading: "modals.save.heading",
         text: this.get('content.title'),
@@ -446,7 +461,7 @@ export default Ember.Controller.extend({
       var subroute;
 
       this._executeQuery().then(function (job) {
-        if (job.get('status') !== constants.statuses.finished) {
+        if (job.get('status') !== constants.statuses.succeeded) {
           subroute = constants.namingConventions.subroutes.jobLogs;
         } else {
           subroute = constants.namingConventions.subroutes.jobResults;
@@ -470,6 +485,23 @@ export default Ember.Controller.extend({
       }, function (err) {
         self.send('addAlert', constants.alerts.error, err.responseText, "alerts.errors.save.query");
       });
+    },
+
+    toggleOverlay: function (targetController) {
+      if (this.get('visualExplain.showOverlay') && targetController !== 'visualExplain') {
+        this.set('visualExplain.showOverlay', false);
+      } else if (this.get('tezUI.showOverlay') && targetController !== 'tezUI') {
+        this.set('tezUI.showOverlay', false);
+      } else if (this.get('settings.showOverlay') && targetController !== 'settings') {
+        this.set('settings.showOverlay', false);
+      }
+
+      if (targetController !== 'settings') {
+        //set content for visual explain and tez ui.
+        this.set(targetController + '.content', this.get('content'));
+      }
+
+      this.toggleProperty(targetController + '.showOverlay');
     }
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index/history-query/logs.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index/history-query/logs.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index/history-query/logs.js
index f633dd4..02edc86 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index/history-query/logs.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index/history-query/logs.js
@@ -18,6 +18,7 @@
 
 import Ember from 'ember';
 import constants from 'hive/utils/constants';
+import utils from 'hive/utils/functions';
 
 export default Ember.ObjectController.extend({
   needs: [ constants.namingConventions.loadedFiles ],
@@ -73,12 +74,11 @@ export default Ember.ObjectController.extend({
   },
 
   isJobRunning: function (job) {
-    var status = job.get('status');
-
-    return status !== constants.statuses.finished &&
-           status !== constants.statuses.canceled &&
-           status !== constants.statuses.closed &&
-           status !== constants.statuses.error;
+    return utils.insensitiveCompare(job.get('status'),
+                                    constants.statuses.unknown,
+                                    constants.statuses.initialized,
+                                    constants.statuses.running,
+                                    constants.statuses.pending);
   },
 
   getLogs: function () {

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index/history-query/results.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index/history-query/results.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index/history-query/results.js
index 34e69bd..7977541 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index/history-query/results.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index/history-query/results.js
@@ -18,6 +18,7 @@
 
 import Ember from 'ember';
 import constants from 'hive/utils/constants';
+import utils from 'hive/utils/functions';
 
 export default Ember.ObjectController.extend({
   cachedResults: [],
@@ -51,7 +52,7 @@ export default Ember.ObjectController.extend({
   initResults: function () {
     var existingJob;
 
-    if (this.get('content.status') !== constants.statuses.finished) {
+    if (!utils.insensitiveCompare(this.get('content.status'), constants.statuses.succeeded)) {
       return;
     }
 
@@ -131,4 +132,4 @@ export default Ember.ObjectController.extend({
       }
     }
   }
-});
\ No newline at end of file
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/job.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/job.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/job.js
index 5bb1cd5..7e62c20 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/job.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/job.js
@@ -18,6 +18,7 @@
 
 import Ember from 'ember';
 import constants from 'hive/utils/constants';
+import utils from 'hive/utils/functions';
 
 export default Ember.ObjectController.extend({
   needs: [ constants.namingConventions.history, constants.namingConventions.loadedFiles ],
@@ -25,9 +26,7 @@ export default Ember.ObjectController.extend({
   files: Ember.computed.alias('controllers.' + constants.namingConventions.loadedFiles),
 
   canStop: function () {
-    return this.get('status') === constants.statuses.running ||
-           this.get('status') === constants.statuses.initialized ||
-           this.get('status') === constants.statuses.pending;
+    return utils.insensitiveCompare(this.get('status'), constants.statuses.running, constants.statuses.initialized, constants.statuses.pending)
   }.property('status'),
 
   actions: {

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/open-queries.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/open-queries.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/open-queries.js
index e97c0e6..2abfff6 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/open-queries.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/open-queries.js
@@ -18,6 +18,7 @@
 
 import Ember from 'ember';
 import constants from 'hive/utils/constants';
+import utils from 'hive/utils/functions';
 
 export default Ember.ArrayController.extend({
   needs: [ constants.namingConventions.databases,
@@ -91,7 +92,9 @@ export default Ember.ArrayController.extend({
           var isExplainedQuery,
               subroute;
 
-          if (model.get('constructor.typeKey') === constants.namingConventions.job) {
+          //jobs that were run from hive ui (exclude ats jobs)
+          if (model.get('constructor.typeKey') === constants.namingConventions.job &&
+              utils.isInteger(model.get('id'))) {
             isExplainedQuery = self.get('currentQuery.fileContent').indexOf(constants.namingConventions.explainPrefix) > -1;
 
             if (isExplainedQuery) {
@@ -251,13 +254,9 @@ export default Ember.ArrayController.extend({
     var hasQueryParams = this.get('index.queryParams.length');
     var hasSettings = this.get('settings').hasSettings(jobId);
 
-    if ( selected && selected[0] !== "" ||
+    return selected && selected[0] !== "" ||
          hasQueryParams ||
-         hasSettings ) {
-      return true;
-    }
-
-    return false;
+         hasSettings;
   },
 
   actions: {

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/queries.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/queries.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/queries.js
index 85730a4..0195dc2 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/queries.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/queries.js
@@ -63,10 +63,7 @@ export default Ember.ArrayController.extend(FilterableMixin, {
   ],
 
   model: function () {
-    var queries = this.get('queries');
-    queries = queries ? queries.filterBy('isNew', false) : queries;
-
-    return this.filter(queries);
+    return this.filter(this.get('queries'));
   }.property('queries', 'filters.@each'),
 
   actions: {

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/settings.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/settings.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/settings.js
index 51101c5..c59fc88 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/settings.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/settings.js
@@ -28,19 +28,28 @@ export default Ember.ArrayController.extend({
   index: Ember.computed.alias('controllers.' + constants.namingConventions.index),
   openQueries: Ember.computed.alias('controllers.' + constants.namingConventions.openQueries),
 
-  showSettingsOverlay: false,
+  predefinedSettings: constants.hiveParameters,
 
-  querySettings: function () {
+  currentSettings: function () {
     var currentId = this.get('index.model.id');
-    return this.findBy('id', currentId);
-  }.property('model.[]', 'index.model.id'),
+    var targetSettings = this.findBy('id', currentId);
+
+    if (!targetSettings) {
+      targetSettings = this.pushObject(Ember.Object.create({
+        id: currentId,
+        settings: []
+      }));
+    }
+
+    return targetSettings;
+  }.property('index.model.id'),
 
   updateSettingsId: function (oldId, newId) {
     this.filterBy('id', oldId).setEach('id', newId);
   },
 
-  getSettingsString: function (id) {
-    var currentId = id ? id : this.get('index.model.id');
+  getSettingsString: function () {
+    var currentId = this.get('index.model.id');
 
     var querySettings = this.findBy('id', currentId);
 
@@ -49,13 +58,9 @@ export default Ember.ArrayController.extend({
     }
 
     var settings = querySettings.get('settings').map(function (setting) {
-      return 'set %@ = %@;'.fmt(setting.key, setting.value);
+      return 'set %@ = %@;'.fmt(setting.get('key.name'), setting.get('value'));
     });
 
-    if (querySettings.get('runOnTez')) {
-      settings.push('set %@ = tez;'.fmt(constants.settings.executionEngine));
-    }
-
     return settings.join("\n");
   },
 
@@ -70,8 +75,7 @@ export default Ember.ArrayController.extend({
     var id = this.get('index.model.id');
     var query = this.get('openQueries.currentQuery');
     var content = query.get('fileContent');
-    var runOnTez = false;
-
+    var self = this;
 
     var regex = new RegExp(/^set\s+[\w-.]+(\s+|\s?)=(\s+|\s?)[\w-.]+(\s+|\s?);/gim);
     var settings = content.match(regex);
@@ -83,68 +87,97 @@ export default Ember.ArrayController.extend({
     query.set('fileContent', content.replace(regex, '').trim());
     settings = settings.map(function (setting) {
       var KV = setting.split('=');
-
-      return {
-        key: KV[0].replace('set', '').trim(),
+      var obj = {
+        key: {
+          name: KV[0].replace('set', '').trim()
+        },
         value: KV[1].replace(';', '').trim()
       };
-    });
 
-    // remove runOnTez from settings
-    settings = settings.findBy('key', constants.settings.executionEngine).without(false);
+      if (!self.get('predefinedSettings').findBy('name', obj.key.name)) {
+        self.get('predefinedSettings').pushObject({
+          name: obj.key.name
+        });
+      }
 
-    this.setSettingForQuery(id, settings, !!runOnTez);
+      return obj;
+    });
+
+    this.setSettingForQuery(id, settings);
   }.observes('openQueries.currentQuery', 'openQueries.tabUpdated'),
 
-  setSettingForQuery: function (id, settings, runOnTez) {
+  setSettingForQuery: function (id, settings) {
     var querySettings = this.findBy('id', id);
 
     if (!querySettings) {
       this.pushObject(Ember.Object.create({
         id: id,
-        settings: settings,
-        runOnTez: runOnTez
+        settings: settings
       }));
     } else {
       querySettings.setProperties({
-        'settings': settings,
-        'runOnTez': runOnTez
+        'settings': settings
       });
     }
   },
 
-  createSettingsForQuery: function () {
-    var currentId = this.get('index.model.id');
+  validate: function() {
+    var settings = this.get('currentSettings.settings') || [];
+    var predefinedSettings = this.get('predefinedSettings');
+
+    settings.forEach(function(setting) {
+      var predefined = predefinedSettings.filterProperty('name', setting.get('key.name'));
+      if (!predefined.length) {
+        return;
+      } else {
+        predefined = predefined[0];
+      }
+
+      if (predefined.values && predefined.values.contains(setting.get('value'))) {
+        setting.set('valid', true);
+        return;
+      }
+
+      if (predefined.validate && predefined.validate.test(setting.get('value'))) {
+        setting.set('valid', true);
+        return;
+      }
+
+      setting.set('valid', false);
+    });
+  }.observes('currentSettings.[]', 'currentSettings.settings.@each.value', 'currentSettings.settings.@each.key'),
 
-    if (!this.findBy('id', currentId)) {
-      this.pushObject(Ember.Object.create({
-        id: currentId,
-        settings: [],
-        runOnTez: false
-      }));
-    }
-  },
+  currentSettingsAreValid: function() {
+    var currentSettings = this.get('currentSettings.settings');
+    var invalid = currentSettings.filterProperty('valid', false);
 
-  actions: {
-    toggleOverlay: function () {
-      // create a setting object if its not already there
-      this.createSettingsForQuery();
-      this.toggleProperty('showSettingsOverlay');
-    },
+    return invalid.length ? false : true;
+  }.property('currentSettings.settings.@each.value', 'currentSettings.settings.@each.key'),
 
+  actions: {
     add: function () {
       var currentId = this.get('index.model.id'),
-       querySettings = this.findBy('id', currentId);
+          querySettings = this.findBy('id', currentId);
 
-      querySettings.settings.pushObject(Ember.Object.create({
-        key: '',
-        value: ''
-      }));
+      var Setting = Ember.Object.extend({
+        valid: true,
+        selection: Ember.Object.create(),
+        value: Ember.computed.alias('selection.value')
+      });
+
+      querySettings.get('settings').pushObject(Setting.create({}));
     },
 
     remove: function (setting) {
-      var currentId = this.get('index.model.id');
-      this.findBy('id', currentId).settings.removeObject(setting);
+      this.findBy('id', this.get('index.model.id')).settings.removeObject(setting);
+    },
+
+    addKey: function (param) {
+      var newKey = this.get('predefinedSettings').pushObject({
+        name: param
+      });
+
+      this.get('currentSettings.settings').findBy('key', null).set('key', newKey);
     }
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/tez-ui.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/tez-ui.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/tez-ui.js
new file mode 100644
index 0000000..dc99fd1
--- /dev/null
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/tez-ui.js
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Controller.extend({
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/visual-explain.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/visual-explain.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/visual-explain.js
new file mode 100644
index 0000000..5275a9b
--- /dev/null
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/visual-explain.js
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.ObjectController.extend({
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/helpers/all-uppercase.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/helpers/all-uppercase.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/helpers/all-uppercase.js
new file mode 100644
index 0000000..e5ea321
--- /dev/null
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/helpers/all-uppercase.js
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export function allUppercase(input) {
+  return input.toUpperCase();
+};
+
+export default Ember.Handlebars.makeBoundHelper(allUppercase);

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js
index 0040307..a87bf78 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js
@@ -64,7 +64,9 @@ TRANSLATIONS = {
     query: {
       editor: 'Query Editor',
       process: 'Query Process Results',
-      parameters: 'Parameters'
+      parameters: 'Parameters',
+      visualExplain: 'Visual Explain',
+      tez: 'TEZ'
     },
     download: 'Save results...'
   },
@@ -77,7 +79,8 @@ TRANSLATIONS = {
       database: 'Select Database...',
       udfs: 'Insert udfs',
       file: 'Select File Resource...',
-      noFileResource: '(no file)'
+      noFileResource: '(no file)',
+      value: "Select value..."
     },
     fileResource: {
       name: "resource name",

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/models/job.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/models/job.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/models/job.js
index 95e5d57..73fcc20 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/models/job.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/models/job.js
@@ -28,5 +28,17 @@ export default DS.Model.extend({
   status: DS.attr('string'),
   dateSubmitted: DS.attr('date'),
   forcedContent: DS.attr('string'),
-  logFile: DS.attr('string')
+  logFile: DS.attr('string'),
+
+  dateSubmittedTimestamp: function () {
+    var date = this.get('dateSubmitted');
+
+    return date ? date * 1000 : date;
+  }.property('dateSubmitted'),
+
+  uppercaseStatus: function () {
+    var status = this.get('status');
+
+    return status ? status.toUpperCase() : status;
+  }.property('status')
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/application.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/application.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/application.js
index 8def09e..2f9a5ae 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/application.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/application.js
@@ -20,7 +20,7 @@ import Ember from 'ember';
 import constants from 'hive/utils/constants';
 
 export default Ember.Route.extend({
-  setupController: function() {
+  setupController: function () {
     var self = this;
 
     this.controllerFor(constants.namingConventions.databases).set('model', this.store.find(constants.namingConventions.database));
@@ -31,7 +31,7 @@ export default Ember.Route.extend({
   },
 
   actions: {
-    openModal: function(modalTemplate, options) {
+    openModal: function (modalTemplate, options) {
       this.controllerFor(modalTemplate).setProperties({
         heading: options.heading,
         text: options.text,


[6/6] ambari git commit: AMBARI-10045. Host detail page: Component action button isn't disabled while its action list is empty (alexantonenko)

Posted by al...@apache.org.
AMBARI-10045. Host detail page: Component action button isn't disabled while its action list is empty (alexantonenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d786be4a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d786be4a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d786be4a

Branch: refs/heads/trunk
Commit: d786be4ae63c89a1157e54997e79e7aa3439c143
Parents: 751b310
Author: Alex Antonenko <hi...@gmail.com>
Authored: Thu Mar 12 18:37:34 2015 +0200
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Thu Mar 12 21:31:20 2015 +0200

----------------------------------------------------------------------
 .../main/host/details/host_component_view.js    |  5 +++--
 .../host/details/host_component_view_test.js    | 20 +++++++++++++++++++-
 2 files changed, 22 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d786be4a/ambari-web/app/views/main/host/details/host_component_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/host/details/host_component_view.js b/ambari-web/app/views/main/host/details/host_component_view.js
index 758a2bd..fece46d 100644
--- a/ambari-web/app/views/main/host/details/host_component_view.js
+++ b/ambari-web/app/views/main/host/details/host_component_view.js
@@ -103,11 +103,12 @@ App.HostComponentView = Em.View.extend({
   /**
    * CSS-class for disabling drop-down menu with list of host component actions
    * Disabled if host's <code>healthClass</code> is health-status-DEAD-YELLOW (lost heartbeat)
+   * Disabled if component's action list is empty
    * @type {String}
    */
   disabled: function () {
-    return (this.get('parentView.content.healthClass') === "health-status-DEAD-YELLOW") ? 'disabled' : '';
-  }.property('parentView.content.healthClass'),
+    return ( (this.get('parentView.content.healthClass') === "health-status-DEAD-YELLOW") || (this.get('noActionAvailable') === 'hidden' && this.get('isRestartComponentDisabled'))) ? 'disabled' : '';
+  }.property('parentView.content.healthClass', 'noActionAvailable', 'isRestartComponentDisabled'),
 
   /**
    * For Upgrade failed state

http://git-wip-us.apache.org/repos/asf/ambari/blob/d786be4a/ambari-web/test/views/main/host/details/host_component_view_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/host/details/host_component_view_test.js b/ambari-web/test/views/main/host/details/host_component_view_test.js
index 44cb5fa..e0dcb40 100644
--- a/ambari-web/test/views/main/host/details/host_component_view_test.js
+++ b/ambari-web/test/views/main/host/details/host_component_view_test.js
@@ -55,10 +55,26 @@ describe('App.HostComponentView', function() {
     var tests = Em.A([
       {
         parentView: {content: {healthClass: 'health-status-DEAD-YELLOW'}},
+        noActionAvailable: '',
+        isRestartComponentDisabled: true,
         e: 'disabled'
       },
       {
         parentView: {content: {healthClass: 'another-class'}},
+        noActionAvailable: '',
+        isRestartComponentDisabled: true,
+        e: ''
+      },
+      {
+        parentView: {content: {healthClass: 'another-class'}},
+        noActionAvailable: 'hidden',
+        isRestartComponentDisabled: true,
+        e: 'disabled'
+      },
+      {
+        parentView: {content: {healthClass: 'another-class'}},
+        noActionAvailable: 'hidden',
+        isRestartComponentDisabled: false,
         e: ''
       }
     ]);
@@ -68,7 +84,9 @@ describe('App.HostComponentView', function() {
         hostComponentView = App.HostComponentView.create({
           startBlinking: function(){},
           doBlinking: function(){},
-          parentView: test.parentView
+          parentView: test.parentView,
+          noActionAvailable: test.noActionAvailable,
+          isRestartComponentDisabled: test.isRestartComponentDisabled
         });
         expect(hostComponentView.get('disabled')).to.equal(test.e);
       });


[4/6] ambari git commit: AMBARI-10035. Hive View: Retrieve history from ATS (alexantonenko)

Posted by al...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobControllerImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobControllerImpl.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobControllerImpl.java
deleted file mode 100644
index a9f315c..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobControllerImpl.java
+++ /dev/null
@@ -1,326 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.resources.jobs;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.hive.client.*;
-import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.hive.resources.savedQueries.SavedQuery;
-import org.apache.ambari.view.hive.resources.savedQueries.SavedQueryResourceManager;
-import org.apache.ambari.view.hive.utils.*;
-import org.apache.ambari.view.hive.utils.HdfsApi;
-import org.apache.ambari.view.hive.utils.HdfsUtil;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.lang.reflect.Proxy;
-import java.text.SimpleDateFormat;
-import java.util.*;
-
-public class JobControllerImpl implements JobController, ModifyNotificationDelegate {
-  private final static Logger LOG =
-      LoggerFactory.getLogger(JobControllerImpl.class);
-
-  private ViewContext context;
-  private Job jobUnproxied;
-  private Job job;
-  private boolean modified;
-
-  private OperationHandleControllerFactory operationHandleControllerFactory;
-  private ConnectionController hiveSession;
-  private SavedQueryResourceManager savedQueryResourceManager;
-
-  /**
-   * JobController constructor
-   * Warning: Create JobControllers ONLY using JobControllerFactory!
-   */
-  public JobControllerImpl(ViewContext context, Job job) {
-    this.context = context;
-    setJobPOJO(job);
-    operationHandleControllerFactory = OperationHandleControllerFactory.getInstance(context);
-    hiveSession = ConnectionController.getInstance(context);
-    savedQueryResourceManager = SavedQueryResourceManager.getInstance(context);
-  }
-
-  public String getQueryForJob() {
-    FilePaginator paginator = new FilePaginator(job.getQueryFile(), context);
-    String query;
-    try {
-      query = paginator.readPage(0);  //warning - reading only 0 page restricts size of query to 1MB
-    } catch (IOException e) {
-      throw new ServiceFormattedException("Error when reading file: " + e.toString(), e);
-    } catch (InterruptedException e) {
-      throw new ServiceFormattedException("Error when reading file: " + e.toString(), e);
-    }
-    return query;
-  }
-
-  private static final String DEFAULT_DB = "default";
-  public String getJobDatabase() {
-    if (job.getDataBase() != null) {
-      return job.getDataBase();
-    } else {
-      return DEFAULT_DB;
-    }
-  }
-
-  @Override
-  public void submit() {
-    setupHiveBeforeQueryExecute();
-
-    String query = getQueryForJob();
-    OperationHandleController handleController = hiveSession.executeQuery(query);
-
-    handleController.persistHandleForJob(job);
-  }
-
-  private void setupHiveBeforeQueryExecute() {
-    String database = getJobDatabase();
-    hiveSession.selectDatabase(database);
-  }
-
-  @Override
-  public void cancel() throws ItemNotFound {
-    OperationHandleController handle = operationHandleControllerFactory.getHandleForJob(job);
-    handle.cancel();
-  }
-
-  @Override
-  public void onRead() {
-    updateOperationStatus();
-    updateOperationLogs();
-
-    updateJobDuration();
-  }
-
-  public void updateOperationStatus() {
-    try {
-
-      OperationHandleController handle = operationHandleControllerFactory.getHandleForJob(job);
-      String status = handle.getOperationStatus();
-      job.setStatus(status);
-      LOG.debug("Status of job#" + job.getId() + " is " + job.getStatus());
-
-    } catch (NoOperationStatusSetException e) {
-      LOG.info("Operation state is not set for job#" + job.getId());
-
-    } catch (HiveErrorStatusException e) {
-      LOG.debug("Error updating status for job#" + job.getId() + ": " + e.getMessage());
-      job.setStatus(Job.JOB_STATE_UNKNOWN);
-
-    } catch (HiveClientException e) {
-      throw new ServiceFormattedException("Could not fetch job status " + job.getId(), e);
-
-    } catch (ItemNotFound itemNotFound) {
-      LOG.debug("No TOperationHandle for job#" + job.getId() + ", can't update status");
-    }
-  }
-
-  public void updateOperationLogs() {
-    try {
-      OperationHandleController handle = operationHandleControllerFactory.getHandleForJob(job);
-      String logs = handle.getLogs();
-
-//      LogParser info = LogParser.parseLog(logs);
-
-      String logFilePath = job.getLogFile();
-      HdfsUtil.putStringToFile(context, logFilePath, logs);
-
-    } catch (HiveClientRuntimeException ex) {
-      LOG.error("Error while fetching logs: " + ex.getMessage());
-    } catch (ItemNotFound itemNotFound) {
-      LOG.debug("No TOperationHandle for job#" + job.getId() + ", can't read logs");
-    }
-  }
-
-  public boolean isJobEnded() {
-    String status = job.getStatus();
-    return status.equals(Job.JOB_STATE_FINISHED) || status.equals(Job.JOB_STATE_CANCELED) ||
-        status.equals(Job.JOB_STATE_CLOSED) || status.equals(Job.JOB_STATE_ERROR) ||
-        status.equals(Job.JOB_STATE_UNKNOWN); // Unknown is not finished, but polling makes no sense
-  }
-
-  @Override
-  public Job getJob() {
-    return job;
-  }
-
-  /**
-   * Use carefully. Returns unproxied bean object
-   * @return unproxied bean object
-   */
-  @Override
-  public Job getJobPOJO() {
-    return jobUnproxied;
-  }
-
-  public void setJobPOJO(Job jobPOJO) {
-    Job jobModifyNotificationProxy = (Job) Proxy.newProxyInstance(jobPOJO.getClass().getClassLoader(),
-        new Class[]{Job.class},
-        new ModifyNotificationInvocationHandler(jobPOJO, this));
-    this.job = jobModifyNotificationProxy;
-
-    this.jobUnproxied = jobPOJO;
-  }
-
-  @Override
-  public Cursor getResults() throws ItemNotFound {
-    OperationHandleController handle = operationHandleControllerFactory.getHandleForJob(job);
-    return handle.getResults();
-  }
-
-  @Override
-  public void afterCreation() {
-    setupStatusDirIfNotPresent();
-    setupQueryFileIfNotPresent();
-    setupLogFileIfNotPresent();
-
-    setCreationDate();
-  }
-
-  public void setupLogFileIfNotPresent() {
-    if (job.getLogFile() == null || job.getLogFile().isEmpty()) {
-      setupLogFile();
-    }
-  }
-
-  public void setupQueryFileIfNotPresent() {
-    if (job.getQueryFile() == null || job.getQueryFile().isEmpty()) {
-      setupQueryFile();
-    }
-  }
-
-  public void setupStatusDirIfNotPresent() {
-    if (job.getStatusDir() == null || job.getStatusDir().isEmpty()) {
-      setupStatusDir();
-    }
-  }
-
-  private static final long MillisInSecond = 1000L;
-
-  public void updateJobDuration() {
-    job.setDuration(System.currentTimeMillis() / MillisInSecond - job.getDateSubmitted());
-  }
-
-  public void setCreationDate() {
-    job.setDateSubmitted(System.currentTimeMillis() / MillisInSecond);
-  }
-
-
-
-  private void setupLogFile() {
-    LOG.debug("Creating log file for job#" + job.getId());
-
-    String logFile = job.getStatusDir() + "/" + "logs";
-    HdfsUtil.putStringToFile(context, logFile, "");
-
-    job.setLogFile(logFile);
-    LOG.debug("Log file for job#" + job.getId() + ": " + logFile);
-  }
-
-  private void setupStatusDir() {
-    String newDirPrefix = makeStatusDirectoryPrefix();
-    String newDir = HdfsUtil.findUnallocatedFileName(context, newDirPrefix, "");
-
-    job.setStatusDir(newDir);
-    LOG.debug("Status dir for job#" + job.getId() + ": " + newDir);
-  }
-
-  private String makeStatusDirectoryPrefix() {
-    String userScriptsPath = context.getProperties().get("jobs.dir");
-
-    if (userScriptsPath == null) { // TODO: move check to initialization code
-      String msg = "jobs.dir is not configured!";
-      LOG.error(msg);
-      throw new MisconfigurationFormattedException("jobs.dir");
-    }
-
-    String normalizedName = String.format("hive-job-%d", job.getId());
-    String timestamp = new SimpleDateFormat("yyyy-MM-dd_hh-mm").format(new Date());
-    return String.format(userScriptsPath +
-        "/%s-%s", normalizedName, timestamp);
-  }
-
-  private void setupQueryFile() {
-    String statusDir = job.getStatusDir();
-    assert statusDir != null : "setupStatusDir() should be called first";
-
-    String jobQueryFilePath = statusDir + "/" + "query.hql";
-
-    try {
-
-      if (job.getForcedContent() != null) {
-
-        HdfsUtil.putStringToFile(context, jobQueryFilePath, job.getForcedContent());
-        job.setForcedContent("");  // prevent forcedContent to be written to DB
-
-      }
-      else if (job.getQueryId() != null) {
-
-        String savedQueryFile = getRelatedSavedQueryFile();
-        HdfsApi.getInstance(context).copy(savedQueryFile, jobQueryFilePath);
-        job.setQueryFile(jobQueryFilePath);
-
-      } else {
-
-        throw new BadRequestFormattedException("queryId or forcedContent should be passed!", null);
-
-      }
-
-    } catch (IOException e) {
-      throw new ServiceFormattedException("Error in creation: " + e.toString(), e);
-    } catch (InterruptedException e) {
-      throw new ServiceFormattedException("Error in creation: " + e.toString(), e);
-    }
-    job.setQueryFile(jobQueryFilePath);
-
-    LOG.debug("Query file for job#" + job.getId() + ": " + jobQueryFilePath);
-  }
-
-  private String getRelatedSavedQueryFile() {
-    SavedQuery savedQuery;
-    try {
-      savedQuery = savedQueryResourceManager.read(job.getQueryId());
-    } catch (ItemNotFound itemNotFound) {
-      throw new BadRequestFormattedException("queryId not found!", itemNotFound);
-    }
-    return savedQuery.getQueryFile();
-  }
-
-  @Override
-  public boolean onModification(Object object) {
-    setModified(true);
-    return true;
-  }
-
-  @Override
-  public boolean isModified() {
-    return modified;
-  }
-
-  public void setModified(boolean modified) {
-    this.modified = modified;
-  }
-
-  @Override
-  public void clearModified() {
-    setModified(false);
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobImpl.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobImpl.java
deleted file mode 100644
index 7d65957..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobImpl.java
+++ /dev/null
@@ -1,203 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.resources.jobs;
-
-import org.apache.commons.beanutils.PropertyUtils;
-
-import java.lang.reflect.InvocationTargetException;
-import java.util.Map;
-
-/**
- * Bean to represent saved query
- */
-public class JobImpl implements Job {
-  private String title = null;
-  private String queryFile = null;
-  private String statusDir = null;
-  private Long dateSubmitted = 0L;
-  private Long duration = 0L;
-  private String status = JOB_STATE_UNKNOWN;
-  private String forcedContent = null;
-  private String dataBase = null;
-  private Integer queryId = null;
-
-  private Integer id = null;
-  private String owner = null;
-
-  private String logFile;
-  private String confFile;
-
-  public JobImpl() {}
-  public JobImpl(Map<String, Object> stringObjectMap) throws InvocationTargetException, IllegalAccessException {
-    for (Map.Entry<String, Object> entry : stringObjectMap.entrySet())  {
-      try {
-        PropertyUtils.setProperty(this, entry.getKey(), entry.getValue());
-      } catch (NoSuchMethodException e) {
-        //do nothing, skip
-      }
-    }
-  }
-
-  @Override
-  public boolean equals(Object o) {
-    if (this == o) return true;
-    if (!(o instanceof Job)) return false;
-
-    JobImpl job = (JobImpl) o;
-
-    if (id != null ? !id.equals(job.id) : job.id != null) return false;
-
-    return true;
-  }
-
-  @Override
-  public int hashCode() {
-    return id != null ? id.hashCode() : 0;
-  }
-
-  @Override
-  public Integer getId() {
-    return id;
-  }
-
-  @Override
-  public void setId(Integer id) {
-    this.id = id;
-  }
-
-  @Override
-  public String getOwner() {
-    return owner;
-  }
-
-  @Override
-  public void setOwner(String owner) {
-    this.owner = owner;
-  }
-
-  @Override
-  public String getTitle() {
-    return title;
-  }
-
-  @Override
-  public void setTitle(String title) {
-    this.title = title;
-  }
-
-  @Override
-  public String getQueryFile() {
-    return queryFile;
-  }
-
-  @Override
-  public void setQueryFile(String queryFile) {
-    this.queryFile = queryFile;
-  }
-
-  @Override
-  public Long getDateSubmitted() {
-    return dateSubmitted;
-  }
-
-  @Override
-  public void setDateSubmitted(Long dateSubmitted) {
-    this.dateSubmitted = dateSubmitted;
-  }
-
-  @Override
-  public Long getDuration() {
-    return duration;
-  }
-
-  @Override
-  public void setDuration(Long duration) {
-    this.duration = duration;
-  }
-
-  @Override
-  public String getStatus() {
-    return status;
-  }
-
-  @Override
-  public void setStatus(String status) {
-    this.status = status;
-  }
-
-  @Override
-  public String getForcedContent() {
-    return forcedContent;
-  }
-
-  @Override
-  public void setForcedContent(String forcedContent) {
-    this.forcedContent = forcedContent;
-  }
-
-  @Override
-  public Integer getQueryId() {
-    return queryId;
-  }
-
-  @Override
-  public void setQueryId(Integer queryId) {
-    this.queryId = queryId;
-  }
-
-  @Override
-  public String getStatusDir() {
-    return statusDir;
-  }
-
-  @Override
-  public void setStatusDir(String statusDir) {
-    this.statusDir = statusDir;
-  }
-
-  @Override
-  public String getDataBase() {
-    return dataBase;
-  }
-
-  @Override
-  public void setDataBase(String dataBase) {
-    this.dataBase = dataBase;
-  }
-
-  @Override
-  public String getLogFile() {
-    return logFile;
-  }
-
-  @Override
-  public void setLogFile(String logFile) {
-    this.logFile = logFile;
-  }
-
-  @Override
-  public String getConfFile() {
-    return confFile;
-  }
-
-  @Override
-  public void setConfFile(String confFile) {
-    this.confFile = confFile;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobResourceManager.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobResourceManager.java
deleted file mode 100644
index 139b29a..0000000
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobResourceManager.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.resources.jobs;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.hive.client.*;
-import org.apache.ambari.view.hive.persistence.utils.FilteringStrategy;
-import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.hive.resources.PersonalCRUDResourceManager;
-import org.apache.ambari.view.hive.utils.*;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.*;
-
-/**
- * Object that provides CRUD operations for query objects
- */
-public class JobResourceManager extends PersonalCRUDResourceManager<Job> {
-  private final static Logger LOG =
-      LoggerFactory.getLogger(JobResourceManager.class);
-
-  private JobControllerFactory jobControllerFactory;
-
-  /**
-   * Constructor
-   * @param context View Context instance
-   */
-  public JobResourceManager(ViewContext context) {
-    super(JobImpl.class, context);
-    jobControllerFactory = JobControllerFactory.getInstance(context);
-  }
-
-  @Override
-  public Job create(Job object) {
-    super.create(object);
-    JobController jobController = jobControllerFactory.createControllerForJob(object);
-
-    try {
-
-      jobController.afterCreation();
-      saveIfModified(jobController);
-
-    } catch (ServiceFormattedException e) {
-      cleanupAfterErrorAndThrowAgain(object, e);
-    }
-
-    return object;
-  }
-
-  private void saveIfModified(JobController jobController) {
-    if (jobController.isModified()) {
-      save(jobController.getJobPOJO());
-      jobController.clearModified();
-    }
-  }
-
-
-  @Override
-  public Job read(Integer id) throws ItemNotFound {
-    Job job = super.read(id);
-    JobController jobController =  jobControllerFactory.createControllerForJob(job);
-    jobController.onRead();
-    saveIfModified(jobController);
-    return job;
-  }
-
-  @Override
-  public List<Job> readAll(FilteringStrategy filteringStrategy) {
-    return super.readAll(filteringStrategy);
-  }
-
-  @Override
-  public void delete(Integer resourceId) throws ItemNotFound {
-    super.delete(resourceId);
-  }
-
-  public JobController readController(Integer id) throws ItemNotFound {
-    Job job = read(id);
-    return jobControllerFactory.createControllerForJob(job);
-  }
-
-  public Cursor getJobResultsCursor(Job job) {
-    try {
-      JobController jobController = jobControllerFactory.createControllerForJob(job);
-      return jobController.getResults();
-    } catch (ItemNotFound itemNotFound) {
-      throw new NotFoundFormattedException("Job results are expired", null);
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobResourceProvider.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobResourceProvider.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobResourceProvider.java
index 780921d..460278e 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobResourceProvider.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobResourceProvider.java
@@ -22,6 +22,8 @@ import com.google.inject.Inject;
 import org.apache.ambari.view.*;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive.persistence.utils.OnlyOwnersFilteringStrategy;
+import org.apache.ambari.view.hive.resources.jobs.viewJobs.*;
+import org.apache.ambari.view.hive.utils.SharedObjectsFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -43,7 +45,7 @@ public class JobResourceProvider implements ResourceProvider<Job> {
 
   protected synchronized JobResourceManager getResourceManager() {
     if (resourceManager == null) {
-      resourceManager = new JobResourceManager(context);
+      resourceManager = new JobResourceManager(new SharedObjectsFactory(context), context);
     }
     return resourceManager;
   }
@@ -51,7 +53,7 @@ public class JobResourceProvider implements ResourceProvider<Job> {
   @Override
   public Job getResource(String resourceId, Set<String> properties) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
     try {
-      return getResourceManager().read(Integer.valueOf(resourceId));
+      return getResourceManager().read(resourceId);
     } catch (ItemNotFound itemNotFound) {
       throw new NoSuchResourceException(resourceId);
     }
@@ -74,7 +76,7 @@ public class JobResourceProvider implements ResourceProvider<Job> {
       throw new SystemException("error on creating resource", e);
     }
     getResourceManager().create(item);
-    JobController jobController = JobControllerFactory.getInstance(context).createControllerForJob(item);
+    JobController jobController = new SharedObjectsFactory(context).getJobControllerFactory().createControllerForJob(item);
     jobController.submit();
   }
 
@@ -89,7 +91,7 @@ public class JobResourceProvider implements ResourceProvider<Job> {
       throw new SystemException("error on updating resource", e);
     }
     try {
-      getResourceManager().update(item, Integer.valueOf(resourceId));
+      getResourceManager().update(item, resourceId);
     } catch (ItemNotFound itemNotFound) {
       throw new NoSuchResourceException(resourceId);
     }
@@ -99,7 +101,7 @@ public class JobResourceProvider implements ResourceProvider<Job> {
   @Override
   public boolean deleteResource(String resourceId) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
     try {
-      getResourceManager().delete(Integer.valueOf(resourceId));
+      getResourceManager().delete(resourceId);
     } catch (ItemNotFound itemNotFound) {
       throw new NoSuchResourceException(resourceId);
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java
index 1c4f2a9..d9c69e8 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java
@@ -24,7 +24,11 @@ import org.apache.ambari.view.hive.BaseService;
 import org.apache.ambari.view.hive.backgroundjobs.BackgroundJobController;
 import org.apache.ambari.view.hive.client.Cursor;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.hive.persistence.utils.OnlyOwnersFilteringStrategy;
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.ATSRequestsDelegate;
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.ATSRequestsDelegateImpl;
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.ATSParser;
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.IATSParser;
+import org.apache.ambari.view.hive.resources.jobs.viewJobs.*;
 import org.apache.ambari.view.hive.utils.*;
 import org.apache.ambari.view.hive.utils.HdfsApi;
 import org.apache.commons.beanutils.PropertyUtils;
@@ -59,16 +63,25 @@ public class JobService extends BaseService {
   ViewResourceHandler handler;
 
   protected JobResourceManager resourceManager;
+  private IOperationHandleResourceManager opHandleResourceManager;
   protected final static Logger LOG =
       LoggerFactory.getLogger(JobService.class);
 
   protected synchronized JobResourceManager getResourceManager() {
     if (resourceManager == null) {
-      resourceManager = new JobResourceManager(context);
+      SharedObjectsFactory connectionsFactory = getSharedObjectsFactory();
+      resourceManager = new JobResourceManager(connectionsFactory, context);
     }
     return resourceManager;
   }
 
+  private IOperationHandleResourceManager getOperationHandleResourceManager() {
+    if (opHandleResourceManager == null) {
+      opHandleResourceManager = new OperationHandleResourceManager(getSharedObjectsFactory());
+    }
+    return opHandleResourceManager;
+  }
+
   /**
    * Get single item
    */
@@ -77,7 +90,7 @@ public class JobService extends BaseService {
   @Produces(MediaType.APPLICATION_JSON)
   public Response getOne(@PathParam("jobId") String jobId) {
     try {
-      JobController jobController = getResourceManager().readController(Integer.valueOf(jobId));
+      JobController jobController = getResourceManager().readController(jobId);
 
       JSONObject jsonJob = jsonObjectFromJob(jobController);
 
@@ -110,7 +123,7 @@ public class JobService extends BaseService {
                                 @Context HttpServletResponse response,
                                 @QueryParam("columns") final String requestedColumns) {
     try {
-      JobController jobController = getResourceManager().readController(Integer.valueOf(jobId));
+      JobController jobController = getResourceManager().readController(jobId);
       final Cursor resultSet = jobController.getResults();
       resultSet.selectColumns(requestedColumns);
 
@@ -153,7 +166,7 @@ public class JobService extends BaseService {
                                    @QueryParam("columns") final String requestedColumns,
                                    @Context HttpServletResponse response) {
     try {
-      final JobController jobController = getResourceManager().readController(Integer.valueOf(jobId));
+      final JobController jobController = getResourceManager().readController(jobId);
 
       String backgroundJobId = "csv" + String.valueOf(jobController.getJob().getId());
       if (commence != null && commence.equals("true")) {
@@ -167,7 +180,7 @@ public class JobService extends BaseService {
               Cursor resultSet = jobController.getResults();
               resultSet.selectColumns(requestedColumns);
 
-              FSDataOutputStream stream = HdfsApi.getInstance(context).create(targetFile, true);
+              FSDataOutputStream stream = getSharedObjectsFactory().getHdfsApi().create(targetFile, true);
               Writer writer = new BufferedWriter(new OutputStreamWriter(stream));
               CSVPrinter csvPrinter = new CSVPrinter(writer, CSVFormat.DEFAULT);
               try {
@@ -225,7 +238,7 @@ public class JobService extends BaseService {
                              @QueryParam("searchId") String searchId,
                              @QueryParam("columns") final String requestedColumns) {
     try {
-      final JobController jobController = getResourceManager().readController(Integer.valueOf(jobId));
+      final JobController jobController = getResourceManager().readController(jobId);
 
       return ResultsPaginationController.getInstance(context)
            .request(jobId, searchId, true, fromBeginning, count,
@@ -276,13 +289,13 @@ public class JobService extends BaseService {
     try {
       JobController jobController;
       try {
-        jobController = getResourceManager().readController(Integer.valueOf(id));
+        jobController = getResourceManager().readController(id);
       } catch (ItemNotFound itemNotFound) {
         throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
       }
       jobController.cancel();
       if (remove != null && remove.compareTo("true") == 0) {
-        getResourceManager().delete(Integer.valueOf(id));
+        getResourceManager().delete(id);
       }
 //      getResourceManager().delete(Integer.valueOf(queryId));
       return Response.status(204).build();
@@ -303,8 +316,10 @@ public class JobService extends BaseService {
   public Response getList() {
     try {
       LOG.debug("Getting all job");
-      List allJobs = getResourceManager().readAll(
-          new OnlyOwnersFilteringStrategy(this.context.getUsername()));  //TODO: move strategy to PersonalCRUDRM
+      ATSRequestsDelegate transport = new ATSRequestsDelegateImpl(context, "http://127.0.0.1:8188");
+      IATSParser atsParser = new ATSParser(transport);
+      Aggregator aggregator = new Aggregator(getResourceManager(), getOperationHandleResourceManager(), atsParser);
+      List allJobs = aggregator.readAll(context.getUsername());
 
       JSONObject object = new JSONObject();
       object.put("jobs", allJobs);

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/LogParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/LogParser.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/LogParser.java
index 090781c..3952491 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/LogParser.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/LogParser.java
@@ -23,29 +23,35 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 public class LogParser {
-  public static final Pattern HADOOP_MR_JOBS_RE = Pattern.compile("(http[^\\s]*/proxy/([a-z0-9_]+?)/)");
-  public static final Pattern HADOOP_TEZ_JOBS_RE = Pattern.compile("\\(Executing on YARN cluster with App id ([a-z0-9_]+?)\\)");
-  private LinkedHashSet<JobId> jobsList;
+  public static final Pattern HADOOP_MR_APPS_RE = Pattern.compile("(http[^\\s]*/proxy/([a-z0-9_]+?)/)");
+  public static final Pattern HADOOP_TEZ_APPS_RE = Pattern.compile("\\(Executing on YARN cluster with App id ([a-z0-9_]+?)\\)");
+  private LinkedHashSet<AppId> appsList;
+
+  private LogParser() {}
 
   public static LogParser parseLog(String logs) {
     LogParser parser = new LogParser();
 
-    LinkedHashSet<JobId> mrJobIds = getMRJobIds(logs);
-    LinkedHashSet<JobId> tezJobIds = getTezJobIds(logs);
+    parser.setAppsList(parseApps(logs, parser));
+    return parser;
+  }
 
-    LinkedHashSet<JobId> jobIds = new LinkedHashSet<JobId>();
-    jobIds.addAll(mrJobIds);
-    jobIds.addAll(tezJobIds);
+  public static LinkedHashSet<AppId> parseApps(String logs, LogParser parser) {
+    LinkedHashSet<AppId> mrAppIds = getMRAppIds(logs);
+    LinkedHashSet<AppId> tezAppIds = getTezAppIds(logs);
 
-    parser.setJobsList(jobIds);
-    return parser;
+    LinkedHashSet<AppId> appIds = new LinkedHashSet<AppId>();
+    appIds.addAll(mrAppIds);
+    appIds.addAll(tezAppIds);
+
+    return appIds;
   }
 
-  private static LinkedHashSet<JobId> getMRJobIds(String logs) {
-    Matcher m = HADOOP_MR_JOBS_RE.matcher(logs);
-    LinkedHashSet<JobId> list = new LinkedHashSet<JobId>();
+  private static LinkedHashSet<AppId> getMRAppIds(String logs) {
+    Matcher m = HADOOP_MR_APPS_RE.matcher(logs);
+    LinkedHashSet<AppId> list = new LinkedHashSet<AppId>();
     while (m.find()) {
-      JobId applicationInfo = new JobId();
+      AppId applicationInfo = new AppId();
       applicationInfo.setTrackingUrl(m.group(1));
       applicationInfo.setIdentifier(m.group(2));
       list.add(applicationInfo);
@@ -53,27 +59,34 @@ public class LogParser {
     return list;
   }
 
-  private static LinkedHashSet<JobId> getTezJobIds(String logs) {
-    Matcher m = HADOOP_TEZ_JOBS_RE.matcher(logs);
-    LinkedHashSet<JobId> list = new LinkedHashSet<JobId>();
+  private static LinkedHashSet<AppId> getTezAppIds(String logs) {
+    Matcher m = HADOOP_TEZ_APPS_RE.matcher(logs);
+    LinkedHashSet<AppId> list = new LinkedHashSet<AppId>();
     while (m.find()) {
-      JobId applicationInfo = new JobId();
-      applicationInfo.setTrackingUrl(null);
+      AppId applicationInfo = new AppId();
+      applicationInfo.setTrackingUrl("");
       applicationInfo.setIdentifier(m.group(1));
       list.add(applicationInfo);
     }
     return list;
   }
 
-  public void setJobsList(LinkedHashSet<JobId> jobsList) {
-    this.jobsList = jobsList;
+  public void setAppsList(LinkedHashSet<AppId> appsList) {
+    this.appsList = appsList;
+  }
+
+  public LinkedHashSet<AppId> getAppsList() {
+    return appsList;
   }
 
-  public LinkedHashSet<JobId> getJobsList() {
-    return jobsList;
+  public AppId getLastAppInList() {
+    Object[] appIds = appsList.toArray();
+    if (appIds.length == 0)
+      return null;
+    return (AppId) appIds[appsList.size()-1];
   }
 
-  public static class JobId {
+  public static class AppId {
     private String trackingUrl;
     private String identifier;
 
@@ -96,11 +109,11 @@ public class LogParser {
     @Override
     public boolean equals(Object o) {
       if (this == o) return true;
-      if (!(o instanceof JobId)) return false;
+      if (!(o instanceof AppId)) return false;
 
-      JobId jobId = (JobId) o;
+      AppId appId = (AppId) o;
 
-      if (!identifier.equals(jobId.identifier)) return false;
+      if (!identifier.equals(appId.identifier)) return false;
 
       return true;
     }
@@ -110,4 +123,16 @@ public class LogParser {
       return identifier.hashCode();
     }
   }
+
+  public static class EmptyAppId extends AppId {
+    @Override
+    public String getTrackingUrl() {
+      return "";
+    }
+
+    @Override
+    public String getIdentifier() {
+      return "";
+    }
+  }
 }
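
For illustration, a minimal sketch of how the renamed parser is used. The log lines, host name and
application id below are hypothetical samples; only the proxy-URL and "Executing on YARN cluster"
patterns matter:

    // Illustrative only: made-up HiveServer2 log output, both patterns match the same app.
    String logs =
        "INFO  : Status: Running (Executing on YARN cluster with App id application_1423156117563_0003)\n" +
        "INFO  : Tracking URL: http://rm.example.org:8088/proxy/application_1423156117563_0003/";

    LogParser parser = LogParser.parseLog(logs);
    // AppId.equals()/hashCode() compare identifiers only, so the MR and Tez matches for the
    // same application collapse into one entry; the MR match is added first and keeps its URL.
    LogParser.AppId app = parser.getLastAppInList();   // null if nothing matched
    // app.getIdentifier()  -> "application_1423156117563_0003"
    // app.getTrackingUrl() -> "http://rm.example.org:8088/proxy/application_1423156117563_0003/"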

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleController.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleController.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleController.java
index 551ebdd..e146d55 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleController.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleController.java
@@ -19,10 +19,10 @@
 package org.apache.ambari.view.hive.resources.jobs;
 
 
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.hive.client.ConnectionPool;
 import org.apache.ambari.view.hive.client.Cursor;
 import org.apache.ambari.view.hive.client.HiveClientException;
+import org.apache.ambari.view.hive.client.IConnectionFactory;
+import org.apache.ambari.view.hive.resources.jobs.viewJobs.Job;
 import org.apache.ambari.view.hive.utils.ServiceFormattedException;
 import org.apache.hive.service.cli.thrift.TGetOperationStatusResp;
 import org.apache.hive.service.cli.thrift.TOperationHandle;
@@ -33,18 +33,16 @@ public class OperationHandleController {
   private final static Logger LOG =
       LoggerFactory.getLogger(OperationHandleController.class);
 
-  private ViewContext context;
+  private IConnectionFactory connectionsFabric;
   private TOperationHandle operationHandle;
-  private OperationHandleResourceManager operationHandlesStorage;
+  private IOperationHandleResourceManager operationHandlesStorage;
 
-  public OperationHandleController(ViewContext context, TOperationHandle storedOperationHandle, OperationHandleResourceManager operationHandlesStorage) {
-    this.context = context;
+  public OperationHandleController(IConnectionFactory connectionsFabric, TOperationHandle storedOperationHandle, IOperationHandleResourceManager operationHandlesStorage) {
+    this.connectionsFabric = connectionsFabric;
     this.operationHandle = storedOperationHandle;
     this.operationHandlesStorage = operationHandlesStorage;
   }
 
-
-
   public TOperationHandle getStoredOperationHandle() {
     return operationHandle;
   }
@@ -54,7 +52,7 @@ public class OperationHandleController {
   }
 
   public String getOperationStatus() throws NoOperationStatusSetException, HiveClientException {
-    TGetOperationStatusResp statusResp = ConnectionPool.getConnection(context).getOperationStatus(operationHandle);
+    TGetOperationStatusResp statusResp = connectionsFabric.getHiveConnection().getOperationStatus(operationHandle);
     if (!statusResp.isSetOperationState()) {
       throw new NoOperationStatusSetException("Operation state is not set");
     }
@@ -93,7 +91,7 @@ public class OperationHandleController {
 
   public void cancel() {
     try {
-      ConnectionPool.getConnection(context).cancelOperation(operationHandle);
+      connectionsFabric.getHiveConnection().cancelOperation(operationHandle);
     } catch (HiveClientException e) {
       throw new ServiceFormattedException("Cancel failed: " + e.toString(), e);
     }
@@ -104,10 +102,10 @@ public class OperationHandleController {
   }
 
   public String getLogs() {
-    return ConnectionPool.getConnection(context).getLogs(operationHandle);
+    return connectionsFabric.getHiveConnection().getLogs(operationHandle);
   }
 
   public Cursor getResults() {
-    return ConnectionPool.getConnection(context).getResults(operationHandle);
+    return connectionsFabric.getHiveConnection().getResults(operationHandle);
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleControllerFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleControllerFactory.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleControllerFactory.java
index 5d4a8af..0310855 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleControllerFactory.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleControllerFactory.java
@@ -18,31 +18,22 @@
 
 package org.apache.ambari.view.hive.resources.jobs;
 
-import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive.resources.jobs.viewJobs.Job;
+import org.apache.ambari.view.hive.utils.SharedObjectsFactory;
 import org.apache.hive.service.cli.thrift.TOperationHandle;
 
-import java.util.HashMap;
-import java.util.Map;
-
 public class OperationHandleControllerFactory {
-  private ViewContext context;
-  private OperationHandleResourceManager operationHandlesStorage;
-
-  private OperationHandleControllerFactory(ViewContext context) {
-    this.context = context;
-    operationHandlesStorage = new OperationHandleResourceManager(context);
-  }
+  private SharedObjectsFactory connectionsFabric;
+  private IOperationHandleResourceManager operationHandlesStorage;
 
-  private static Map<String, OperationHandleControllerFactory> viewSingletonObjects = new HashMap<String, OperationHandleControllerFactory>();
-  public static OperationHandleControllerFactory getInstance(ViewContext context) {
-    if (!viewSingletonObjects.containsKey(context.getInstanceName()))
-      viewSingletonObjects.put(context.getInstanceName(), new OperationHandleControllerFactory(context));
-    return viewSingletonObjects.get(context.getInstanceName());
+  public OperationHandleControllerFactory(SharedObjectsFactory connectionsFabric) {
+    this.connectionsFabric = connectionsFabric;
+    operationHandlesStorage = new OperationHandleResourceManager(connectionsFabric);
   }
 
   public OperationHandleController createControllerForHandle(TOperationHandle storedOperationHandle) {
-    return new OperationHandleController(context, storedOperationHandle, operationHandlesStorage);
+    return new OperationHandleController(connectionsFabric, storedOperationHandle, operationHandlesStorage);
   }
 
   public OperationHandleController getHandleForJob(Job job) throws ItemNotFound {

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleResourceManager.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleResourceManager.java
index cffed38..5004677 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleResourceManager.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleResourceManager.java
@@ -18,42 +18,43 @@
 
 package org.apache.ambari.view.hive.resources.jobs;
 
-import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive.persistence.IStorageFactory;
 import org.apache.ambari.view.hive.persistence.utils.FilteringStrategy;
 import org.apache.ambari.view.hive.persistence.utils.Indexed;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive.resources.SharedCRUDResourceManager;
+import org.apache.ambari.view.hive.resources.jobs.viewJobs.Job;
 import org.apache.ambari.view.hive.utils.ServiceFormattedException;
 import org.apache.hive.service.cli.thrift.TOperationHandle;
-import sun.reflect.generics.reflectiveObjects.NotImplementedException;
 
 import java.util.List;
 
-public class OperationHandleResourceManager extends SharedCRUDResourceManager<StoredOperationHandle> {
+public class OperationHandleResourceManager extends SharedCRUDResourceManager<StoredOperationHandle>
+    implements IOperationHandleResourceManager {
   /**
    * Constructor
-   *
-   * @param context       View Context instance
    */
-  public OperationHandleResourceManager(ViewContext context) {
-    super(StoredOperationHandle.class, context);
+  public OperationHandleResourceManager(IStorageFactory storageFabric) {
+    super(StoredOperationHandle.class, storageFabric);
   }
 
+  @Override
   public List<StoredOperationHandle> readJobRelatedHandles(final Job job) {
-    try {
-      return getStorage().loadWhere(StoredOperationHandle.class, "jobId = " + job.getId());
-    } catch (NotImplementedException e) {
-      // fallback to filtering strategy
-      return getStorage().loadAll(StoredOperationHandle.class, new FilteringStrategy() {
-        @Override
-        public boolean isConform(Indexed item) {
-          StoredOperationHandle handle = (StoredOperationHandle) item;
-          return (handle.getJobId() != null && handle.getJobId().equals(job.getId()));
-        }
-      });
-    }
+    return storageFabric.getStorage().loadAll(StoredOperationHandle.class, new FilteringStrategy() {
+      @Override
+      public boolean isConform(Indexed item) {
+        StoredOperationHandle handle = (StoredOperationHandle) item;
+        return (handle.getJobId() != null && handle.getJobId().equals(job.getId()));
+      }
+
+      @Override
+      public String whereStatement() {
+        return "jobId = '" + job.getId() + "'";
+      }
+    });
   }
 
+  @Override
   public void putHandleForJob(TOperationHandle h, Job job) {
     StoredOperationHandle handle = StoredOperationHandle.buildFromTOperationHandle(h);
     handle.setJobId(job.getId());
@@ -71,11 +72,13 @@ public class OperationHandleResourceManager extends SharedCRUDResourceManager<St
     }
   }
 
+  @Override
   public boolean containsHandleForJob(Job job) {
     List<StoredOperationHandle> jobRelatedHandles = readJobRelatedHandles(job);
     return jobRelatedHandles.size() > 0;
   }
 
+  @Override
   public TOperationHandle getHandleForJob(Job job) throws ItemNotFound {
     List<StoredOperationHandle> jobRelatedHandles = readJobRelatedHandles(job);
     if (jobRelatedHandles.size() == 0)

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/StoredOperationHandle.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/StoredOperationHandle.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/StoredOperationHandle.java
index 193b226..1d3f6e0 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/StoredOperationHandle.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/StoredOperationHandle.java
@@ -40,9 +40,9 @@ public class StoredOperationHandle implements Indexed {
   private String guid;
   private String secret;
 
-  private Integer jobId;
+  private String jobId;
 
-  private Integer id;
+  private String id;
 
   public StoredOperationHandle() {}
   public StoredOperationHandle(Map<String, Object> stringObjectMap) throws InvocationTargetException, IllegalAccessException {
@@ -126,21 +126,21 @@ public class StoredOperationHandle implements Indexed {
     this.secret = secret;
   }
 
-  public Integer getJobId() {
+  public String getJobId() {
     return jobId;
   }
 
-  public void setJobId(Integer jobId) {
+  public void setJobId(String jobId) {
     this.jobId = jobId;
   }
 
   @Override
-  public Integer getId() {
+  public String getId() {
     return id;
   }
 
   @Override
-  public void setId(Integer id) {
+  public void setId(String id) {
     this.id = id;
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSParser.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSParser.java
new file mode 100644
index 0000000..b644d4c
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSParser.java
@@ -0,0 +1,139 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs.atsJobs;
+
+import org.apache.ambari.view.hive.utils.ServiceFormattedException;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+import org.json.simple.JSONValue;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.LinkedList;
+import java.util.List;
+
+public class ATSParser implements IATSParser {
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(ATSParser.class);
+
+  private ATSRequestsDelegate delegate;
+
+  private static final long MillisInSecond = 1000L;
+
+  public ATSParser(ATSRequestsDelegate delegate) {
+    this.delegate = delegate;
+  }
+
+  @Override
+  public List<HiveQueryId> getHiveQuieryIdsList(String username) {
+    JSONObject entities = delegate.hiveQueryIdList(username);
+    JSONArray jobs = (JSONArray) entities.get("entities");
+
+    List<HiveQueryId> parsedJobs = new LinkedList<HiveQueryId>();
+    for(Object job : jobs) {
+      try {
+        HiveQueryId parsedJob = parseAtsHiveJob((JSONObject) job);
+        parsedJobs.add(parsedJob);
+      } catch (Exception ex) {
+        LOG.error("Error while parsing ATS job", ex);
+      }
+    }
+
+    return parsedJobs;
+  }
+
+  @Override
+  public HiveQueryId getHiveQuieryIdByOperationId(byte[] guid) {
+    String guidString = new String(guid);
+    JSONObject entities = delegate.hiveQueryIdByOperationId(guidString);
+    JSONArray jobs = (JSONArray) entities.get("entities");
+
+    assert jobs.size() <= 1;
+    if (jobs.size() == 0) {
+      //TODO: throw appropriate exception
+      throw new ServiceFormattedException("HIVE_QUERY_ID with operationid=" + guidString + " not found");
+    }
+
+    return parseAtsHiveJob((JSONObject) jobs.get(0));
+  }
+
+  @Override
+  public TezDagId getTezDAGByName(String name) {
+    JSONArray tezDagEntities = (JSONArray) delegate.tezDagByName(name).get("entities");
+    assert tezDagEntities.size() <= 1;
+    if (tezDagEntities.size() == 0) {
+      return new TezDagId();
+    }
+    JSONObject tezDagEntity = (JSONObject) tezDagEntities.get(0);
+
+    TezDagId parsedDag = new TezDagId();
+    JSONArray applicationIds = (JSONArray) ((JSONObject) tezDagEntity.get("primaryfilters")).get("applicationId");
+    parsedDag.applicationId = (String) applicationIds.get(0);
+    parsedDag.status = (String) ((JSONObject) tezDagEntity.get("otherinfo")).get("status");
+    return parsedDag;
+  }
+
+  private HiveQueryId parseAtsHiveJob(JSONObject job) {
+    HiveQueryId parsedJob = new HiveQueryId();
+
+    parsedJob.entity = (String) job.get("entity");
+    parsedJob.starttime = ((Long) job.get("starttime")) / MillisInSecond;
+
+    JSONObject primaryfilters = (JSONObject) job.get("primaryfilters");
+    JSONArray operationIds = (JSONArray) primaryfilters.get("operationid");
+    if (operationIds != null) {
+      parsedJob.operationId = (String) (operationIds).get(0);
+    }
+    JSONArray users = (JSONArray) primaryfilters.get("user");
+    if (users != null) {
+      parsedJob.user = (String) (users).get(0);
+    }
+
+    JSONObject lastEvent = getLastEvent(job);
+    long lastEventTimestamp = ((Long) lastEvent.get("timestamp")) / MillisInSecond;
+
+    parsedJob.duration = lastEventTimestamp - parsedJob.starttime;
+
+    JSONObject otherinfo = (JSONObject) job.get("otherinfo");
+    JSONObject query = (JSONObject) JSONValue.parse((String) otherinfo.get("QUERY"));
+
+    parsedJob.query = (String) query.get("queryText");
+    JSONObject stages = (JSONObject) ((JSONObject) query.get("queryPlan")).get("STAGE PLANS");
+
+    List<String> dagIds = new LinkedList<String>();
+    List<JSONObject> stagesList = new LinkedList<JSONObject>();
+
+    for (Object key : stages.keySet()) {
+      JSONObject stage = (JSONObject) stages.get(key);
+      if (stage.get("Tez") != null) {
+        String dagId = (String) ((JSONObject) stage.get("Tez")).get("DagName:");
+        dagIds.add(dagId);
+      }
+      stagesList.add(stage);
+    }
+    parsedJob.dagNames = dagIds;
+    parsedJob.stages = stagesList;
+    return parsedJob;
+  }
+
+  private JSONObject getLastEvent(JSONObject atsEntity) {
+    JSONArray events = (JSONArray) atsEntity.get("events");
+    return (JSONObject) events.get(0);
+  }
+}
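
For reference, a minimal HIVE_QUERY_ID payload that parseAtsHiveJob() above accepts, reconstructed
from the fields the parser actually reads. The values are illustrative and this is not the full ATS
schema; note that otherinfo.QUERY arrives as a JSON-encoded string:

    {
      "entities" : [ {
        "entity" : "sample_hive_query_id",
        "starttime" : 1426190400000,
        "primaryfilters" : {
          "operationid" : [ "sample-operation-guid" ],
          "user" : [ "admin" ]
        },
        "events" : [ { "timestamp" : 1426190460000 } ],
        "otherinfo" : {
          "QUERY" : "{\"queryText\":\"select 1\",\"queryPlan\":{\"STAGE PLANS\":{\"Stage-1\":{\"Tez\":{\"DagName:\":\"sample_dag_1\"}}}}}"
        }
      } ]
    }

With these values the parser yields starttime = 1426190400 (seconds), duration = 60,
query = "select 1" and dagNames = ["sample_dag_1"].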

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSParserFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSParserFactory.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSParserFactory.java
new file mode 100644
index 0000000..f5e9bcf
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSParserFactory.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs.atsJobs;
+
+import org.apache.ambari.view.ViewContext;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class ATSParserFactory {
+
+  private ViewContext context;
+
+  public ATSParserFactory(ViewContext context) {
+    this.context = context;
+  }
+
+  public ATSParser getATSParser() {
+    ATSRequestsDelegateImpl delegate = new ATSRequestsDelegateImpl(context, getATSUrl(context));
+    return new ATSParser(delegate);
+  }
+
+  public static String getATSUrl(ViewContext context) {
+    return context.getProperties().get("yarn.ats.url");
+  }
+}
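
A short usage sketch of the factory (illustrative; assumes a ViewContext whose instance properties
include yarn.ats.url, e.g. http://ats.example.org:8188). Unlike the hardcoded http://127.0.0.1:8188
in the JobService.getList() hunk earlier in this patch, the factory resolves the Timeline Server
address from that property:

    // Hypothetical wiring: "context" is the view's ViewContext.
    ATSParserFactory parserFactory = new ATSParserFactory(context);
    IATSParser atsParser = parserFactory.getATSParser();
    List<HiveQueryId> queries = atsParser.getHiveQuieryIdsList(context.getUsername());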

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSRequestsDelegate.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSRequestsDelegate.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSRequestsDelegate.java
new file mode 100644
index 0000000..3aa07d4
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSRequestsDelegate.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs.atsJobs;
+
+import org.json.simple.JSONObject;
+
+public interface ATSRequestsDelegate {
+  JSONObject hiveQueryIdList(String username);
+
+  JSONObject hiveQueryIdByOperationId(String operationId);
+
+  JSONObject tezDagByName(String name);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSRequestsDelegateImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSRequestsDelegateImpl.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSRequestsDelegateImpl.java
new file mode 100644
index 0000000..047bd63
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSRequestsDelegateImpl.java
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs.atsJobs;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.commons.io.IOUtils;
+import org.json.simple.JSONObject;
+import org.json.simple.JSONValue;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.HashMap;
+
+public class ATSRequestsDelegateImpl implements ATSRequestsDelegate {
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(ATSRequestsDelegateImpl.class);
+  public static final String EMPTY_ENTITIES_JSON = "{ \"entities\" : [  ] }";
+
+  private ViewContext context;
+  private String atsUrl;
+
+  public ATSRequestsDelegateImpl(ViewContext context, String atsUrl) {
+    this.context = context;
+    this.atsUrl = atsUrl;
+  }
+
+  @Override
+  public JSONObject hiveQueryIdList(String username) {
+    String hiveQueriesListUrl = atsUrl + "/ws/v1/timeline/HIVE_QUERY_ID?primaryFilter=requestuser:" + username;
+    String response = readFromWithDefault(hiveQueriesListUrl, EMPTY_ENTITIES_JSON);
+    return (JSONObject) JSONValue.parse(response);
+  }
+
+  @Override
+  public JSONObject hiveQueryIdByOperationId(String operationId) {
+    String hiveQueriesListUrl = atsUrl + "/ws/v1/timeline/HIVE_QUERY_ID?primaryFilter=operationid:" + operationId;
+    String response = readFromWithDefault(hiveQueriesListUrl, EMPTY_ENTITIES_JSON);
+    return (JSONObject) JSONValue.parse(response);
+  }
+
+  @Override
+  public JSONObject tezDagByName(String name) {
+    String tezDagUrl = atsUrl + "/ws/v1/timeline/TEZ_DAG_ID?primaryFilter=dagName:" + name;
+    String response = readFromWithDefault(tezDagUrl, EMPTY_ENTITIES_JSON);
+    return (JSONObject) JSONValue.parse(response);
+  }
+
+  protected String readFromWithDefault(String atsQueryUrl, String defaultResponse) {
+    String response;
+    try {
+      InputStream responseInputStream = context.getURLStreamProvider().readFrom(atsQueryUrl, "GET",
+          null, new HashMap<String, String>());
+      response = IOUtils.toString(responseInputStream);
+    } catch (IOException e) {
+      LOG.error("Error while reading from ATS", e);
+      response = defaultResponse;
+    }
+    return response;
+  }
+
+  public String getAtsUrl() {
+    return atsUrl;
+  }
+
+  public void setAtsUrl(String atsUrl) {
+    this.atsUrl = atsUrl;
+  }
+}
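
The delegate above issues plain GET requests against the YARN Timeline Server REST API; with the
paths used in the code, the three methods read URLs of the following form (the username and ids are
placeholders):

    GET {atsUrl}/ws/v1/timeline/HIVE_QUERY_ID?primaryFilter=requestuser:<username>
    GET {atsUrl}/ws/v1/timeline/HIVE_QUERY_ID?primaryFilter=operationid:<operation-id>
    GET {atsUrl}/ws/v1/timeline/TEZ_DAG_ID?primaryFilter=dagName:<dag-name>

On an IOException the delegate logs the error and returns the supplied default (the empty entities
document), so the parser always receives a well-formed "entities" array.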

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/HiveQueryId.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/HiveQueryId.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/HiveQueryId.java
new file mode 100644
index 0000000..edb726b
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/HiveQueryId.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs.atsJobs;
+
+import org.json.simple.JSONObject;
+
+import java.util.List;
+
+public class HiveQueryId {
+  public String entity;
+  public String query;
+
+  public List<String> dagNames;
+
+  public List<JSONObject> stages;
+
+  public long starttime;
+  public long duration;
+  public String operationId;
+  public String user;
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/IATSParser.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/IATSParser.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/IATSParser.java
new file mode 100644
index 0000000..d029fdc
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/IATSParser.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs.atsJobs;
+
+import java.util.List;
+
+public interface IATSParser {
+  List<HiveQueryId> getHiveQuieryIdsList(String username);
+
+  HiveQueryId getHiveQuieryIdByOperationId(byte[] guid);
+
+  TezDagId getTezDAGByName(String name);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/TezDagId.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/TezDagId.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/TezDagId.java
new file mode 100644
index 0000000..061c51c
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/TezDagId.java
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs.atsJobs;
+
+public class TezDagId {
+  public static final String STATUS_UNKNOWN = "UNKNOWN";
+  public String applicationId = "";
+  public String dagName = "";
+  public String status = STATUS_UNKNOWN;
+}
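
Similarly, the fields that ATSParser.getTezDAGByName() (earlier in this patch) reads from a
TEZ_DAG_ID entity, shown as a minimal illustrative payload; when no entity matches, the parser
returns a fresh TezDagId, i.e. an empty applicationId and status UNKNOWN:

    {
      "entities" : [ {
        "primaryfilters" : { "applicationId" : [ "application_1423156117563_0003" ] },
        "otherinfo"      : { "status" : "SUCCEEDED" }
      } ]
    }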

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/IJobControllerFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/IJobControllerFactory.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/IJobControllerFactory.java
new file mode 100644
index 0000000..89fbb85
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/IJobControllerFactory.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs.viewJobs;
+
+public interface IJobControllerFactory {
+  JobController createControllerForJob(Job job);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/Job.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/Job.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/Job.java
new file mode 100644
index 0000000..004932c
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/Job.java
@@ -0,0 +1,99 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs.viewJobs;
+
+
+import org.apache.ambari.view.hive.persistence.utils.Indexed;
+import org.apache.ambari.view.hive.persistence.utils.PersonalResource;
+
+import java.io.Serializable;
+
+/**
+ * Interface for Job bean to create Proxy for it
+ */
+public interface Job extends Serializable,Indexed,PersonalResource {
+  public static final String JOB_STATE_UNKNOWN = "Unknown";
+  public static final String JOB_STATE_INITIALIZED = "Initialized";
+  public static final String JOB_STATE_RUNNING = "Running";
+  public static final String JOB_STATE_FINISHED = "Succeeded";
+  public static final String JOB_STATE_CANCELED = "Canceled";
+  public static final String JOB_STATE_CLOSED = "Closed";
+  public static final String JOB_STATE_ERROR = "Error";
+  public static final String JOB_STATE_PENDING = "Pending";
+
+  String getId();
+
+  void setId(String id);
+
+  String getOwner();
+
+  void setOwner(String owner);
+
+  String getTitle();
+
+  void setTitle(String title);
+
+  String getQueryFile();
+
+  void setQueryFile(String queryFile);
+
+  Long getDateSubmitted();
+
+  void setDateSubmitted(Long dateSubmitted);
+
+  Long getDuration();
+
+  void setDuration(Long duration);
+
+  String getStatus();
+
+  void setStatus(String status);
+
+  String getForcedContent();
+
+  void setForcedContent(String forcedContent);
+
+  String getQueryId();
+
+  void setQueryId(String queryId);
+
+  String getStatusDir();
+
+  void setStatusDir(String statusDir);
+
+  String getDataBase();
+
+  void setDataBase(String dataBase);
+
+  String getLogFile();
+
+  void setLogFile(String logFile);
+
+  String getConfFile();
+
+  void setConfFile(String confFile);
+
+  String getApplicationId();
+
+  void setApplicationId(String applicationId);
+
+  String getDagName();
+
+  void setDagName(String dagName);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobController.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobController.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobController.java
new file mode 100644
index 0000000..339e194
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobController.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs.viewJobs;
+
+import org.apache.ambari.view.hive.client.Cursor;
+import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
+
+public interface JobController {
+  void submit();
+
+  void cancel() throws ItemNotFound;
+
+  Job getJob();
+
+  /**
+   * Use carefully. Returns unproxied bean object
+   * @return unproxied bean object
+   */
+  Job getJobPOJO();
+
+  Cursor getResults() throws ItemNotFound;
+
+  void afterCreation();
+
+  void update();
+
+  boolean isModified();
+
+  void clearModified();
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobControllerFactory.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobControllerFactory.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobControllerFactory.java
new file mode 100644
index 0000000..12d1cdb
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobControllerFactory.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs.viewJobs;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive.utils.SharedObjectsFactory;
+
+public class JobControllerFactory implements IJobControllerFactory {
+  private SharedObjectsFactory sharedObjectsFactory;
+  private ViewContext context;
+
+  public JobControllerFactory(ViewContext context, SharedObjectsFactory sharedObjectsFactory) {
+    this.sharedObjectsFactory = sharedObjectsFactory;
+    this.context = context;
+  }
+
+  @Override
+  public JobController createControllerForJob(Job job) {
+    return new JobControllerImpl(context, job,
+        sharedObjectsFactory.getHiveConnectionController(),
+        sharedObjectsFactory.getOperationHandleControllerFactory(),
+        sharedObjectsFactory.getSavedQueryResourceManager(),
+        sharedObjectsFactory.getATSParser(),
+        sharedObjectsFactory.getHdfsApi());
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobControllerImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobControllerImpl.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobControllerImpl.java
new file mode 100644
index 0000000..a100f3d
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobControllerImpl.java
@@ -0,0 +1,343 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs.viewJobs;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive.client.*;
+import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive.resources.jobs.*;
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.IATSParser;
+import org.apache.ambari.view.hive.resources.savedQueries.SavedQuery;
+import org.apache.ambari.view.hive.resources.savedQueries.SavedQueryResourceManager;
+import org.apache.ambari.view.hive.utils.*;
+import org.apache.ambari.view.hive.utils.HdfsApi;
+import org.apache.ambari.view.hive.utils.HdfsUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.lang.reflect.Proxy;
+import java.text.SimpleDateFormat;
+import java.util.*;
+
+public class JobControllerImpl implements JobController, ModifyNotificationDelegate {
+  private final static Logger LOG =
+      LoggerFactory.getLogger(JobControllerImpl.class);
+
+  private ViewContext context;
+  private HdfsApi hdfsApi;
+  private Job jobUnproxied;
+  private Job job;
+  private boolean modified;
+
+  private OperationHandleControllerFactory opHandleControllerFactory;
+  private ConnectionController hiveSession;
+  private SavedQueryResourceManager savedQueryResourceManager;
+  private IATSParser atsParser;
+
+  /**
+   * JobController constructor
+   * Warning: Create JobControllers ONLY using JobControllerFactory!
+   */
+  public JobControllerImpl(ViewContext context, Job job,
+                           ConnectionController hiveSession,
+                           OperationHandleControllerFactory opHandleControllerFactory,
+                           SavedQueryResourceManager savedQueryResourceManager,
+                           IATSParser atsParser,
+                           HdfsApi hdfsApi) {
+    this.context = context;
+    setJobPOJO(job);
+    this.opHandleControllerFactory = opHandleControllerFactory;
+    this.hiveSession = hiveSession;
+    this.savedQueryResourceManager = savedQueryResourceManager;
+    this.atsParser = atsParser;
+    this.hdfsApi = hdfsApi;
+  }
+
+  public String getQueryForJob() {
+    FilePaginator paginator = new FilePaginator(job.getQueryFile(), hdfsApi);
+    String query;
+    try {
+      query = paginator.readPage(0);  // warning: reading only page 0 limits the query size to 1 MB
+    } catch (IOException e) {
+      throw new ServiceFormattedException("Error when reading file: " + e.toString(), e);
+    } catch (InterruptedException e) {
+      throw new ServiceFormattedException("Error when reading file: " + e.toString(), e);
+    }
+    return query;
+  }
+
+  private static final String DEFAULT_DB = "default";
+  public String getJobDatabase() {
+    if (job.getDataBase() != null) {
+      return job.getDataBase();
+    } else {
+      return DEFAULT_DB;
+    }
+  }
+
+  @Override
+  public void submit() {
+    setupHiveBeforeQueryExecute();
+
+    String query = getQueryForJob();
+    OperationHandleController handleController = hiveSession.executeQuery(query);
+
+    handleController.persistHandleForJob(job);
+
+//    atsParser.getHiveQueryIdsList()
+  }
+
+  private void setupHiveBeforeQueryExecute() {
+    String database = getJobDatabase();
+    hiveSession.selectDatabase(database);
+  }
+
+  @Override
+  public void cancel() throws ItemNotFound {
+    OperationHandleController handle = opHandleControllerFactory.getHandleForJob(job);
+    handle.cancel();
+  }
+
+  @Override
+  public void update() {
+    updateOperationStatus();
+    updateOperationLogs();
+
+    updateJobDuration();
+  }
+
+  public void updateOperationStatus() {
+    try {
+
+      OperationHandleController handle = opHandleControllerFactory.getHandleForJob(job);
+      String status = handle.getOperationStatus();
+      job.setStatus(status);
+      LOG.debug("Status of job#" + job.getId() + " is " + job.getStatus());
+
+    } catch (NoOperationStatusSetException e) {
+      LOG.info("Operation state is not set for job#" + job.getId());
+
+    } catch (HiveErrorStatusException e) {
+      LOG.debug("Error updating status for job#" + job.getId() + ": " + e.getMessage());
+      job.setStatus(Job.JOB_STATE_UNKNOWN);
+
+    } catch (HiveClientException e) {
+      throw new ServiceFormattedException("Could not fetch job status " + job.getId(), e);
+
+    } catch (ItemNotFound itemNotFound) {
+      LOG.debug("No TOperationHandle for job#" + job.getId() + ", can't update status");
+    }
+  }
+
+  public void updateOperationLogs() {
+    try {
+      OperationHandleController handle = opHandleControllerFactory.getHandleForJob(job);
+      String logs = handle.getLogs();
+
+      LogParser info = LogParser.parseLog(logs);
+      LogParser.AppId app = info.getLastAppInList();
+      if (app != null) {
+        job.setApplicationId(app.getIdentifier());
+      }
+
+      String logFilePath = job.getLogFile();
+      HdfsUtil.putStringToFile(hdfsApi, logFilePath, logs);
+
+    } catch (HiveClientRuntimeException ex) {
+      LOG.error("Error while fetching logs: " + ex.getMessage());
+    } catch (ItemNotFound itemNotFound) {
+      LOG.debug("No TOperationHandle for job#" + job.getId() + ", can't read logs");
+    }
+  }
+
+  public boolean isJobEnded() {
+    String status = job.getStatus();
+    return status.equals(Job.JOB_STATE_FINISHED) || status.equals(Job.JOB_STATE_CANCELED) ||
+        status.equals(Job.JOB_STATE_CLOSED) || status.equals(Job.JOB_STATE_ERROR) ||
+        status.equals(Job.JOB_STATE_UNKNOWN); // Unknown is not finished, but polling makes no sense
+  }
+
+  @Override
+  public Job getJob() {
+    return job;
+  }
+
+  /**
+   * Use carefully. Returns unproxied bean object
+   * @return unproxied bean object
+   */
+  @Override
+  public Job getJobPOJO() {
+    return jobUnproxied;
+  }
+
+  public void setJobPOJO(Job jobPOJO) {
+    Job jobModifyNotificationProxy = (Job) Proxy.newProxyInstance(jobPOJO.getClass().getClassLoader(),
+        new Class[]{Job.class},
+        new ModifyNotificationInvocationHandler(jobPOJO, this));
+    this.job = jobModifyNotificationProxy;
+
+    this.jobUnproxied = jobPOJO;
+  }
+
+  @Override
+  public Cursor getResults() throws ItemNotFound {
+    OperationHandleController handle = opHandleControllerFactory.getHandleForJob(job);
+    return handle.getResults();
+  }
+
+  @Override
+  public void afterCreation() {
+    setupStatusDirIfNotPresent();
+    setupQueryFileIfNotPresent();
+    setupLogFileIfNotPresent();
+
+    setCreationDate();
+  }
+
+  public void setupLogFileIfNotPresent() {
+    if (job.getLogFile() == null || job.getLogFile().isEmpty()) {
+      setupLogFile();
+    }
+  }
+
+  public void setupQueryFileIfNotPresent() {
+    if (job.getQueryFile() == null || job.getQueryFile().isEmpty()) {
+      setupQueryFile();
+    }
+  }
+
+  public void setupStatusDirIfNotPresent() {
+    if (job.getStatusDir() == null || job.getStatusDir().isEmpty()) {
+      setupStatusDir();
+    }
+  }
+
+  private static final long MILLIS_IN_SECOND = 1000L;
+
+  public void updateJobDuration() {
+    job.setDuration(System.currentTimeMillis() / MILLIS_IN_SECOND - job.getDateSubmitted());
+  }
+
+  public void setCreationDate() {
+    job.setDateSubmitted(System.currentTimeMillis() / MILLIS_IN_SECOND);
+  }
+
+
+
+  private void setupLogFile() {
+    LOG.debug("Creating log file for job#" + job.getId());
+
+    String logFile = job.getStatusDir() + "/" + "logs";
+    HdfsUtil.putStringToFile(hdfsApi, logFile, "");
+
+    job.setLogFile(logFile);
+    LOG.debug("Log file for job#" + job.getId() + ": " + logFile);
+  }
+
+  private void setupStatusDir() {
+    String newDirPrefix = makeStatusDirectoryPrefix();
+    String newDir = HdfsUtil.findUnallocatedFileName(hdfsApi, newDirPrefix, "");
+
+    job.setStatusDir(newDir);
+    LOG.debug("Status dir for job#" + job.getId() + ": " + newDir);
+  }
+
+  private String makeStatusDirectoryPrefix() {
+    String userScriptsPath = context.getProperties().get("jobs.dir");
+
+    if (userScriptsPath == null) { // TODO: move check to initialization code
+      String msg = "jobs.dir is not configured!";
+      LOG.error(msg);
+      throw new MisconfigurationFormattedException("jobs.dir");
+    }
+
+    String normalizedName = String.format("hive-job-%s", job.getId());
+    String timestamp = new SimpleDateFormat("yyyy-MM-dd_hh-mm").format(new Date());
+    return String.format(userScriptsPath +
+        "/%s-%s", normalizedName, timestamp);
+  }
+
+  private void setupQueryFile() {
+    String statusDir = job.getStatusDir();
+    assert statusDir != null : "setupStatusDir() should be called first";
+
+    String jobQueryFilePath = statusDir + "/" + "query.hql";
+
+    try {
+
+      if (job.getForcedContent() != null) {
+
+        HdfsUtil.putStringToFile(hdfsApi, jobQueryFilePath, job.getForcedContent());
+        job.setForcedContent("");  // prevent forcedContent from being persisted to the DB
+
+      }
+      else if (job.getQueryId() != null) {
+
+        String savedQueryFile = getRelatedSavedQueryFile();
+        hdfsApi.copy(savedQueryFile, jobQueryFilePath);
+        job.setQueryFile(jobQueryFilePath);
+
+      } else {
+
+        throw new BadRequestFormattedException("queryId or forcedContent should be passed!", null);
+
+      }
+
+    } catch (IOException e) {
+      throw new ServiceFormattedException("Error in creation: " + e.toString(), e);
+    } catch (InterruptedException e) {
+      throw new ServiceFormattedException("Error in creation: " + e.toString(), e);
+    }
+    job.setQueryFile(jobQueryFilePath);
+
+    LOG.debug("Query file for job#" + job.getId() + ": " + jobQueryFilePath);
+  }
+
+  private String getRelatedSavedQueryFile() {
+    SavedQuery savedQuery;
+    try {
+      savedQuery = savedQueryResourceManager.read(job.getQueryId());
+    } catch (ItemNotFound itemNotFound) {
+      throw new BadRequestFormattedException("queryId not found!", itemNotFound);
+    }
+    return savedQuery.getQueryFile();
+  }
+
+  @Override
+  public boolean onModification(Object object) {
+    setModified(true);
+    return true;
+  }
+
+  @Override
+  public boolean isModified() {
+    return modified;
+  }
+
+  public void setModified(boolean modified) {
+    this.modified = modified;
+  }
+
+  @Override
+  public void clearModified() {
+    setModified(false);
+  }
+}

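A minimal usage sketch of the new viewJobs API added above, assuming the ViewContext, SharedObjectsFactory and JobImpl bean are supplied by the hosting view; the persistence call is hypothetical and the snippet is illustrative only, not part of this patch:

  // Assumes context, sharedObjectsFactory and a JobImpl bean provided by the view framework.
  JobControllerFactory factory = new JobControllerFactory(context, sharedObjectsFactory);
  JobController controller = factory.createControllerForJob(job);

  controller.afterCreation();   // allocate the status dir, query file and log file in HDFS
  controller.submit();          // select the database and execute the query through HiveServer2

  controller.update();          // refresh status, logs and duration from the operation handle
  if (controller.isModified()) {
    // jobResourceManager.update(controller.getJobPOJO(), job.getId());  // hypothetical persistence call
    controller.clearModified();
  }

  Cursor results = controller.getResults();  // available once the query has produced a result set
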

[2/6] ambari git commit: AMBARI-10035. Hive View: Retrieve history from ATS (alexantonenko)

Posted by al...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/index/history-query/index.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/index/history-query/index.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/index/history-query/index.js
index be5d1f9..5959938 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/index/history-query/index.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/routes/index/history-query/index.js
@@ -18,6 +18,7 @@
 
 import Ember from 'ember';
 import constants from 'hive/utils/constants';
+import utils from 'hive/utils/functions';
 
 export default Ember.Route.extend({
   setupController: function (controller, model) {
@@ -28,10 +29,16 @@ export default Ember.Route.extend({
       subroute = existingTab.get('subroute');
     }
 
-    if (subroute) {
-      this.transitionTo(subroute, model);
+    // filter out hdfs jobs
+    if (utils.isInteger(model.get('id'))) {
+      if (subroute) {
+        this.transitionTo(subroute, model);
+      } else {
+        this.transitionTo(constants.namingConventions.subroutes.jobLogs, model);
+      }
     } else {
-      this.transitionTo(constants.namingConventions.subroutes.jobLogs, model);
+      this.transitionTo(constants.namingConventions.subroutes.historyQuery, model);
+      this.controllerFor(constants.namingConventions.routes.index).set('model', model);
     }
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/styles/app.scss
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/styles/app.scss b/contrib/views/hive/src/main/resources/ui/hive-web/app/styles/app.scss
index 6a1d35c..7085c85 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/styles/app.scss
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/styles/app.scss
@@ -20,6 +20,7 @@
 
 $panel-background: #f5f5f5;
 $placeholder-color: #aaa;
+$border-color: #ddd;
 
 @-webkit-keyframes fadeIn {
   0% {opacity: 0;}
@@ -50,10 +51,23 @@ $placeholder-color: #aaa;
   -webkit-animation-name: fadeOut;
           animation-name: fadeOut;
 }
+
 #content {
   padding: 20px 0;
 }
 
+#index-content {
+  display: flex;
+}
+
+#visual-explain, #tez-ui {
+  position: absolute;
+  left: 0;
+  width: 0;
+  z-index: 99;
+  background: white;
+}
+
 #alerts-container {
   position: absolute;
   left: 15px;
@@ -78,7 +92,7 @@ aside  hr {
 }
 
 .toolbox {
-  margin-top: 15px;
+  margin: 15px 15px 0 0;
 
   insert-udfs {
     display: inline-block;
@@ -97,6 +111,10 @@ aside  hr {
   color: $placeholder-color;
 }
 
+.form-group {
+  margin-bottom: 0;
+}
+
 .secondary-row {
   background: $panel-background;
 }
@@ -109,12 +127,12 @@ aside  hr {
 }
 
 .CodeMirror {
-  border: 0 1px solid #ddd;
+  border: 0 1px solid $border-color;
 }
 
 .grip {
   height: 20px;
-  border: 0 1px 1px solid #ddd;
+  border: 0 1px 1px solid $border-color;
   background-color: $panel-background;
   color: #bbb;
   text-align: center;
@@ -133,19 +151,19 @@ aside  hr {
   background-color: white;
 }
 
-.Unknown {
+.UNKNOWN {
   color: gray;
 }
 
-.Running, .Pending, .Initialized, .fa-edit {
+.RUNNING, .PENDING, .INITIALIZED, .fa-edit {
   color: orange;
 }
 
-.Finished {
+.SUCCEEDED {
   color: green;
 }
 
-.Canceled, .Error {
+.CANCELED, .ERROR {
   color: red;
 }
 
@@ -153,7 +171,7 @@ dropdown .fa-remove {
   color: red;
 }
 
-.Closed {
+.CLOSED {
   color: blue;
 }
 
@@ -162,6 +180,35 @@ dropdown .fa-remove {
   padding-right: 0;
 }
 
+.main-content {
+  flex-grow: 1;
+}
+
+.query-menu {
+  margin-top: 57px;
+
+  span, popover {
+    cursor: pointer;
+    overflow: hidden;
+    display: block;
+    border-bottom: 1px solid $border-color;
+    padding: 10px;
+  }
+}
+
+.queries-icon {
+  font-size: 20px;
+
+  &.active {
+    color: #428bca;
+  }
+
+  &.text-icon {
+    font-size: 12px;
+    font-weight: 800;
+  }
+}
+
 .alert {
   margin-bottom: 5px;
   padding-bottom: 10px;
@@ -231,7 +278,7 @@ body {
   height: 36px;
   background: url("/img/spinner.gif");
   background-repeat: no-repeat;
-  margin: 0px auto;
+  margin: 0 auto;
 
   &.small {
     background-size: 20px;
@@ -257,25 +304,18 @@ body {
   padding-right: 0 !important;
 }
 
-.popover-right {
-  z-index: 92;
-  float: right;
-  position: relative;
-}
-
 .query-editor-panel .panel-body {
   position: relative;
+  padding-right: 0;
 }
-.settings-toggle {
+
+.settings-containers-toggle {
   position: absolute;
   top: 0;
   right: 25px;
   cursor: pointer;
 }
 
-.settings-toggle.active {
-  color: #428bca;
-}
 .settings-container {
   width: 100%;
   overflow-y: scroll;
@@ -286,8 +326,8 @@ body {
   position: absolute;
   padding: 0 15px;
   z-index: 1000;
-  border: 1px solid #ddd;
 
+  border: 1px solid $border-color;
   -webkit-animation-duration: .5s;
           animation-duration: .5s;
   -webkit-animation-fill-mode: both;
@@ -302,14 +342,25 @@ body {
 
 .setting {
   float: left;
-  margin: 0 10px 10px 0;
+  padding-right: 10px;
+  padding-top: 10px;
+
+  .input-group {
+    width: 100%;
+  }
+  .input-group-addon {
+    text-align: justify;
+    width: 50%;
+  }
 }
 
 .setting .remove {
   line-height: 30px;
-  margin-left: 10px;
   font-size: 18px;
   cursor: pointer;
+  position: absolute;
+  right: -5px;
+  top: -10px;
 }
 
 tabs {
@@ -320,8 +371,3 @@ tabs {
 tree-view ul li {
   padding-left: 10px;
 }
-
-.runOnTez {
-  float: right;
-  margin: 0
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/components/_typeahead-widget.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/components/_typeahead-widget.hbs b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/components/_typeahead-widget.hbs
new file mode 100644
index 0000000..4083ad6
--- /dev/null
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/components/_typeahead-widget.hbs
@@ -0,0 +1,17 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/components/popover-widget.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/components/popover-widget.hbs b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/components/popover-widget.hbs
index fee80c7..f7c6d9c 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/components/popover-widget.hbs
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/components/popover-widget.hbs
@@ -16,4 +16,4 @@
 * limitations under the License.
 }}
 
-<div class="hide"> {{yield}} </div>
+<span class="hide"> {{yield}} </span>

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/databases-search-results.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/databases-search-results.hbs b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/databases-search-results.hbs
index 38f6b66..354d7bd 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/databases-search-results.hbs
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/databases-search-results.hbs
@@ -45,4 +45,4 @@
   </div>
 {{else}}
   <h4>{{t "labels.noTablesMatches"}} "{{tablesSearchTerm}}"</h4>
-{{/if}}
\ No newline at end of file
+{{/if}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/databases.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/databases.hbs b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/databases.hbs
index 115bd70..a0ce19f 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/databases.hbs
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/databases.hbs
@@ -21,8 +21,8 @@
 
     {{typeahead-widget
         content=model
-        optionValuePath="content.id"
-        optionLabelPath="content.name"
+        optionValuePath="id"
+        optionLabelPath="name"
         selection=selectedDatabase
     }}
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/history.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/history.hbs b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/history.hbs
index 22c2ff3..313a233 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/history.hbs
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/history.hbs
@@ -47,8 +47,8 @@
           {{item.title}}
         {{/link-to}}
         </td>
-        <td {{bind-attr class=item.status}}>{{item.status}}</td>
-        <td>{{date-binding item "dateSubmitted"}}</td>
+        <td {{bind-attr class=item.uppercaseStatus}}>{{all-uppercase item.status}}</td>
+        <td>{{date-binding item "dateSubmittedTimestamp"}}</td>
         <td>{{item.duration}}</td>
         <td>
           <a class="fa fa-expand pull-right"></a>

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/index.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/index.hbs b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/index.hbs
index 622af66..b8d1d3b 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/index.hbs
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/index.hbs
@@ -16,67 +16,87 @@
 * limitations under the License.
 }}
 
-<aside class="col-md-3 col-xs-12 pull-left no-padding">
-  {{render 'databases'}}
-</aside>
+<div id="index-content">
+  <div class="main-content">
+    <aside class="col-md-3 col-xs-12 no-padding">
+      {{render 'databases'}}
+    </aside>
 
-<div class="col-md-9 col-xs 12 pull-left query-container">
+    <div class="col-md-9 col-xs-12 query-container">
+      {{render 'alerts'}}
 
-  {{render 'alerts'}}
-  {{#panel-widget headingTranslation="titles.query.editor" classNames="query-editor-panel"}}
-    {{#popover-widget classNames="fa fa-info-circle popover-right" titleTranslation="popover.queryEditorHelp.title" }}
-      <ul>
-        <li>{{t 'popover.queryEditorHelp.content.line1'}}</li>
-        <li>{{t 'popover.queryEditorHelp.content.line2'}}</li>
-        <li>{{t 'popover.queryEditorHelp.content.line3'}}</li>
-      </ul>
-    {{/popover-widget}}
+      {{#panel-widget headingTranslation="titles.query.editor" classNames="query-editor-panel"}}
+        {{render 'open-queries'}}
 
-    {{render 'open-queries'}}
+        <div class="toolbox">
+          <button type="button" class="btn btn-sm btn-success execute-query"
+                  {{bind-attr class="canExecute::disabled"}}
+                  {{action "executeQuery"}}>
+            {{t "buttons.execute"}}
+          </button>
+          <button type="button" class="btn btn-sm btn-default"
+                  {{bind-attr class="canExecute::disabled"}}
+                  {{action "explainQuery"}}>
+            {{t "buttons.explain"}}
+          </button>
+          <button type="button" class="btn btn-sm btn-default save-query-as" {{action "saveQuery"}}>{{t "buttons.saveAs"}}</button>
 
-    <div class="toolbox">
-      <button type="button" class="btn btn-sm btn-success execute-query"
-              {{bind-attr class="canExecute::disabled"}}
-              {{action "executeQuery"}}>
-        {{t "buttons.execute"}}
-      </button>
-      <button type="button" class="btn btn-sm btn-default"
-              {{bind-attr class="canExecute::disabled"}}
-              {{action "explainQuery"}}>
-        {{t "buttons.explain"}}
-      </button>
-      <button type="button" class="btn btn-sm btn-default save-query-as" {{action "saveQuery"}}>{{t "buttons.saveAs"}}</button>
+          {{render 'insert-udfs'}}
 
-      {{render 'insert-udfs'}}
+          <button type="button" class="btn btn-sm btn-primary  pull-right" {{action "addQuery"}}>{{t "buttons.newQuery"}}</button>
+        </div>
+      {{/panel-widget}}
 
-      <button type="button" class="btn btn-sm btn-primary  pull-right" {{action "addQuery"}}>{{t "buttons.newQuery"}}</button>
-    </div>
-  {{/panel-widget}}
-
-  {{#if queryParams}}
-    {{#panel-widget headingTranslation="titles.query.parameters"}}
-      <div class="form-horizontal">
-        {{#each param in queryParams}}
-          <div {{bind-attr class=":form-group param.value:has-success:has-error"}}>
-            <label class="col-sm-3 control-label">{{param.name}}</label>
-              <div class="col-sm-9">
-                {{input value=param.value placeholder="value" class="form-control"}}
+      {{#if queryParams}}
+        {{#panel-widget headingTranslation="titles.query.parameters"}}
+          <div class="form-horizontal">
+            {{#each param in queryParams}}
+              <div {{bind-attr class=":form-group param.value:has-success:has-error"}}>
+                <label class="col-sm-3 control-label">{{param.name}}</label>
+                  <div class="col-sm-9">
+                    {{input value=param.value placeholder="value" class="form-control"}}
+                  </div>
               </div>
+            {{/each}}
           </div>
-        {{/each}}
-      </div>
-    {{/panel-widget}}
+        {{/panel-widget}}
+      {{/if}}
+
+      {{#if displayJobTabs}}
+        {{#panel-widget headingTranslation="titles.query.process"
+                        isLoading=model.isRunning
+                        menuItems=downloadMenu
+                        menuHeadingTranslation="titles.download"
+                        classNames="query-process-results-panel"}}
+          {{#tabs-widget tabs=queryProcessTabs selectedTab=selectedQueryProcessTab}}
+            {{outlet}}
+          {{/tabs-widget}}
+        {{/panel-widget}}
+      {{/if}}
+    </div>
+  </div>
+
+  {{#if tezUI.showOverlay}}
+    {{render 'tez-ui'}}
   {{/if}}
 
-  {{#if displayJobTabs}}
-    {{#panel-widget headingTranslation="titles.query.process"
-                    isLoading=model.isRunning
-                    menuItems=downloadMenu
-                    menuHeadingTranslation="titles.download"
-                    classNames="query-process-results-panel"}}
-      {{#tabs-widget tabs=queryProcessTabs selectedTab=selectedQueryProcessTab}}
-        {{outlet}}
-      {{/tabs-widget}}
-    {{/panel-widget}}
+  {{#if visualExplain.showOverlay}}
+    {{render 'visual-explain'}}
   {{/if}}
+
+  <div class="query-menu">
+    {{#popover-widget classNames="fa fa-info-circle queries-icon" titleTranslation="popover.queryEditorHelp.title" }}
+      <ul>
+        <li>{{t 'popover.queryEditorHelp.content.line1'}}</li>
+        <li>{{t 'popover.queryEditorHelp.content.line2'}}</li>
+        <li>{{t 'popover.queryEditorHelp.content.line3'}}</li>
+      </ul>
+    {{/popover-widget}}
+
+    <span {{bind-attr class="settings.showOverlay:active :fa :fa-gear :queries-icon"}} {{action 'toggleOverlay' 'settings'}}></span>
+
+    <span {{bind-attr class="visualExplain.showOverlay:active :fa :fa-bar-chart :queries-icon"}} {{action 'toggleOverlay' 'visualExplain'}}></span>
+
+    <span {{bind-attr class="tezUI.showOverlay:active :queries-icon :text-icon"}} {{action 'toggleOverlay' 'tezUI'}}>TEZ</span>
+  </div>
 </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/queries.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/queries.hbs b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/queries.hbs
index 6187dfe..692a462 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/queries.hbs
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/queries.hbs
@@ -41,38 +41,40 @@
   </thead>
   <tbody>
     {{#each query in this}}
-      <tr>
-        <td>
-          {{#link-to "index.savedQuery" query}}
-            {{query.shortQuery}}
-          {{/link-to}}
-        </td>
+      {{#unless query.isNew}}
+        <tr>
+          <td>
+            {{#link-to "index.savedQuery" query}}
+              {{query.shortQuery}}
+            {{/link-to}}
+          </td>
 
-        <td>
-          {{#link-to "index.savedQuery" query}}
-            {{query.title}}
-          {{/link-to}}
-        </td>
+          <td>
+            {{#link-to "index.savedQuery" query}}
+              {{query.title}}
+            {{/link-to}}
+          </td>
 
-        <td>{{query.dataBase}}</td>
+          <td>{{query.dataBase}}</td>
 
-        <td>{{query.owner}}</td>
+          <td>{{query.owner}}</td>
 
-        <td>
-          {{#unless query.isNew}}
-            <div class="btn-group pull-right">
-              <span data-toggle="dropdown">
-                <a class="fa fa-gear"></a>
-              </span>
-              <ul class="dropdown-menu" role="menu">
-                {{#each link in controller.links}}
-                  <li {{action 'executeAction' link query}}><a>{{tb-helper link}}</a></li>
-                {{/each}}
-              </ul>
-            </div>
-          {{/unless}}
-        </td>
-      </tr>
+          <td>
+            {{#unless query.isNew}}
+              <div class="btn-group pull-right">
+                <span data-toggle="dropdown">
+                  <a class="fa fa-gear"></a>
+                </span>
+                <ul class="dropdown-menu" role="menu">
+                  {{#each link in controller.links}}
+                    <li {{action 'executeAction' link query}}><a>{{tb-helper link}}</a></li>
+                  {{/each}}
+                </ul>
+              </div>
+            {{/unless}}
+          </td>
+        </tr>
+      {{/unless}}
     {{/each}}
   </tbody>
 </table>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/settings.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/settings.hbs b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/settings.hbs
index 15741802..791b7d0 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/settings.hbs
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/settings.hbs
@@ -16,37 +16,44 @@
 * limitations under the License.
 }}
 
-<span {{bind-attr class="showSettingsOverlay:active :fa :fa-gear :settings-toggle"}} {{action 'toggleOverlay'}}></span>
+{{#if showOverlay}}
+  <div class="settings-container fadeIn">
+    <h3> Settings
+      <button class="btn btn-success btn-xs" {{action 'add'}}><i class="fa fa-plus"></i> Add</button>
+    </h3>
 
-{{#if showSettingsOverlay}}
-<div class="settings-container fadeIn">
-  <h3>Settings
-    <button class="btn btn-success btn-xs" {{action 'add'}}><i class="fa fa-plus"></i> Add</button>
+    {{#each setting in currentSettings.settings}}
+      <div class="setting col-md-6 col-sm-12">
+        <form>
+          <div class="form-group">
+            <div class="input-group">
+              <div class="input-group-addon">
+                {{typeahead-widget
+                    content=predefinedSettings
+                    optionLabelPath="name"
+                    optionValuePath="name"
+                    selection=setting.key
+                    create="addKey"
+                }}
+              </div>
+              <div {{bind-attr class=":input-group-addon setting.valid::has-error"}}>
 
-    <div class="checkbox btn btn-primary btn-xs runOnTez">
-        <label for="runOnTez">
-          {{input type="checkbox" checked=querySettings.runOnTez id="runOnTez"}} {{t "buttons.runOnTez"}}
-        </label>
-    </div>
-  </h3>
+                {{#if setting.key.values}}
+                  {{select-widget items=setting.key.values
+                                  labelPath="value"
+                                  selectedValue=setting.selection
+                                  defaultLabelTranslation="placeholders.select.value"
+                  }}
+                {{else}}
+                  {{input class="input-sm form-control" placeholderTranslation="placeholders.select.value" value=setting.selection.value}}
+                {{/if}}
 
-  {{#each setting in querySettings.settings}}
-    <div class="setting">
-      <form class="form-inline">
-        <div class="form-group">
-          <div class="input-group">
-            <div class="input-group-addon">
-              {{input class="input-sm form-control" placeholder="Key" value=setting.key}}
-            </div>
-            <div class="input-group-addon">
-              {{input class="input-sm form-control" placeholder="Value" value=setting.value}}
-              <span class="fa fa-times-circle remove" {{action 'remove' setting}}></span>
+                <span class="fa fa-times-circle remove pull-right" {{action 'remove' setting}}></span>
+              </div>
             </div>
           </div>
-        </div>
-      </form>
-    </div>
-  {{/each}}
-
-</div>
+        </form>
+      </div>
+    {{/each}}
+  </div>
 {{/if}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/tez-ui.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/tez-ui.hbs b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/tez-ui.hbs
new file mode 100644
index 0000000..f9b8d9c
--- /dev/null
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/tez-ui.hbs
@@ -0,0 +1,22 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div id="tez-ui">
+  {{#panel-widget headingTranslation="titles.query.tez"}}
+  {{/panel-widget}}
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/visual-explain.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/visual-explain.hbs b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/visual-explain.hbs
new file mode 100644
index 0000000..a823ef2
--- /dev/null
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/visual-explain.hbs
@@ -0,0 +1,22 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div id="visual-explain">
+  {{#panel-widget headingTranslation="titles.query.visualExplain"}}
+  {{/panel-widget}}
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/utils/constants.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/utils/constants.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/utils/constants.js
index c9b1fda..888275d 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/utils/constants.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/utils/constants.js
@@ -17,6 +17,7 @@
  */
 
 import Ember from 'ember';
+import helpers from 'hive/utils/functions';
 
 export default Ember.Object.create({
   appTitle: 'Hive',
@@ -62,6 +63,8 @@ export default Ember.Object.create({
     database: 'database',
     databases: 'databases',
     openQueries: 'open-queries',
+    visualExplain: 'visual-explain',
+    tezUI: 'tez-ui',
     file: 'file',
     fileResource: 'file-resource',
     fileResources: 'file-resources',
@@ -79,15 +82,83 @@ export default Ember.Object.create({
     settings: 'settings'
   },
 
+  hiveParameters: [
+    {
+      name: 'hive.tez.container.size',
+      values: [
+        Ember.Object.create({ value: 'true' }),
+        Ember.Object.create({ value: 'false' })
+      ]
+    },
+    {
+      name: 'hive.prewarm.enabled',
+      validate: helpers.regexes.digits
+    },
+    {
+      name: 'hive.prewarm.numcontainers',
+      values: [
+        Ember.Object.create({ value: 'one' }),
+        Ember.Object.create({ value: 'two' }),
+        Ember.Object.create({ value: 'three' })
+      ]
+    },
+    {
+      name: 'hive.tez.auto.reducer.parallelism',
+      value: 'test'
+    },
+    {
+      name: 'hive.execution.engine'
+    },
+    {
+      name: 'hive.vectorized.execution.enabled'
+    },
+    {
+      name: 'tez.am.resource.memory.mb'
+    },
+    {
+      name: 'tez.am.container.idle.release-timeout-min.millis'
+    },
+    {
+      name: 'tez.am.container.idle.release-timeout-max.millis'
+    },
+    {
+      name: 'tez.queue.name'
+    },
+    {
+      name: 'tez.runtime.io.sort.mb'
+    },
+    {
+      name: 'tez.runtime.sort.threads'
+    },
+    {
+      name: 'tez.runtime.optimize.shared.fetch'
+    },
+    {
+      name: 'tez.runtime.compress.codec'
+    },
+    {
+      name: 'tez.runtime.shuffle.keep-alive.enabled'
+    },
+    {
+      name: 'tez.grouping.min-size'
+    },
+    {
+      name: 'tez.grouping.max-size'
+    },
+    {
+      name: 'tez.generate.debug.artifacts'
+    }
+  ],
+
   statuses: {
-    unknown: "Unknown",
-    initialized: "Initialized",
-    running: "Running",
-    finished: "Finished",
-    canceled: "Canceled",
-    closed: "Closed",
-    error: "Error",
-    pending: "Pending"
+    unknown: "UNKNOWN",
+    initialized: "INITIALIZED",
+    running: "RUNNING",
+    succeeded: "SUCCEEDED",
+    canceled: "CANCELED",
+    closed: "CLOSED",
+    error: "ERROR",
+    pending: "PENDING"
   },
 
   alerts: {

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/utils/functions.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/utils/functions.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/utils/functions.js
index ab4f8e5..a63dc5b 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/utils/functions.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/utils/functions.js
@@ -22,7 +22,7 @@ import Ember from 'ember';
 
 export default Ember.Object.create({
   isInteger: function (x) {
-    return (x^0) === x;
+    return !isNaN(x);
   },
 
   isDate: function(date) {
@@ -31,6 +31,19 @@ export default Ember.Object.create({
 
   regexes: {
     allUppercase: /^[^a-z]*$/,
-    whitespaces: /^(\s*).*$/
+    whitespaces: /^(\s*).*$/,
+    digits: /^\d+$/
+  },
+
+  insensitiveCompare: function (sourceString) {
+    var args = Array.prototype.slice.call(arguments, 1);
+
+    if (!sourceString) {
+      return;
+    }
+
+    return args.find(function (arg) {
+      return sourceString.match(new RegExp('^' + arg + '$', 'i'));
+    });
   }
-});
\ No newline at end of file
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/views/tez-ui.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/views/tez-ui.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/views/tez-ui.js
new file mode 100644
index 0000000..c14fd34
--- /dev/null
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/views/tez-ui.js
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.View.extend({
+  didInsertElement: function () {
+    var target = this.$('#tez-ui');
+
+    target.css('min-height', $('.main-content').height());
+    target.animate({ width: $('.main-content').width() }, 'fast');
+  },
+
+  willDestroyElement: function () {
+    var target = this.$('#tez-ui');
+
+    target.css('min-height', 0);
+    target.css('width', 0);
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/app/views/visual-explain.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/views/visual-explain.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/views/visual-explain.js
new file mode 100644
index 0000000..4a887e2
--- /dev/null
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/views/visual-explain.js
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.View.extend({
+  didInsertElement: function () {
+    var target = this.$('#visual-explain');
+
+    target.css('min-height', $('.main-content').height());
+    target.animate({ width: $('.main-content').width() }, 'fast');
+  },
+
+  willDestroyElement: function () {
+    var target = this.$('#visual-explain');
+
+    target.css('min-height', 0);
+    target.css('width', 0);
+  }
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/bower.json
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/bower.json b/contrib/views/hive/src/main/resources/ui/hive-web/bower.json
index a4c2aca..161a626 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/bower.json
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/bower.json
@@ -4,20 +4,20 @@
     "handlebars": "2.0.0",
     "jquery": "^1.11.1",
     "ember": "1.9.0",
-    "ember-data": "1.0.0-beta.11",
+    "ember-data": "1.0.0-beta.14.1",
     "ember-resolver": "~0.1.7",
     "loader.js": "stefanpenner/loader.js#1.0.1",
     "ember-cli-shims": "stefanpenner/ember-cli-shims#0.0.3",
     "ember-cli-test-loader": "rwjblue/ember-cli-test-loader#0.0.4",
     "ember-load-initializers": "stefanpenner/ember-load-initializers#0.0.2",
-    "ember-qunit": "0.1.8",
-    "ember-qunit-notifications": "0.0.4",
+    "ember-qunit": "0.2.8",
+    "ember-qunit-notifications": "0.0.7",
     "qunit": "~1.15.0",
     "bootstrap": "~3.2.0",
     "ember-i18n": "~2.9.0",
     "blanket": "~1.1.5",
     "jquery-ui": "~1.11.2",
-    "selectize": "~0.11.2",
+    "selectize": "~0.12.0",
     "pretender": "0.1.0"
   },
   "resolutions": {

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/package.json
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/package.json b/contrib/views/hive/src/main/resources/ui/hive-web/package.json
index 52e908a..c2523b1 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/package.json
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/package.json
@@ -15,7 +15,7 @@
   },
   "repository": "https://github.com/stefanpenner/ember-cli",
   "engines": {
-    "node": ">= 0.10.0"
+    "node": ">= 0.10.32"
   },
   "author": "",
   "license": "MIT",
@@ -24,7 +24,7 @@
     "bower": ">= 1.3.12",
     "broccoli-asset-rev": "0.3.1",
     "broccoli-sass": "^0.3.2",
-    "ember-cli": "0.1.2",
+    "ember-cli": "0.1.15",
     "ember-cli-blanket": "^0.2.2",
     "ember-cli-content-security-policy": "0.3.0",
     "ember-cli-font-awesome": "0.0.4",
@@ -34,12 +34,12 @@
     "ember-cli-jquery-ui": "0.0.12",
     "ember-cli-moment": "0.0.1",
     "ember-cli-pretender": "^0.3.1",
-    "ember-cli-qunit": "0.1.0",
-    "ember-cli-selectize": "0.0.7",
-    "ember-data": "1.0.0-beta.10",
+    "ember-cli-qunit": "0.3.7",
+    "ember-cli-selectize": "0.0.19",
+    "ember-data": "1.0.0-beta.14.1",
     "ember-dynamic-component": "0.0.1",
     "ember-export-application-global": "^1.0.0",
     "express": "^4.8.5",
-    "glob": "^4.0.5"
+    "glob": "4.4.0"
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/tests/integration/query-editor-test.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/tests/integration/query-editor-test.js b/contrib/views/hive/src/main/resources/ui/hive-web/tests/integration/query-editor-test.js
index 235a225..19c2356 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/tests/integration/query-editor-test.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/tests/integration/query-editor-test.js
@@ -59,7 +59,7 @@ test('Can execute query', function() {
   click('.execute-query');
 
   andThen(function() {
-    ok(find('.query-process-results-panel .nav-tabs li:nth-child(2)').hasClass('active'), 'Results tab is visible');
+    equal(find('.query-process-results-panel').length, 1, 'Job tabs are visible.');
   });
 });
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/history-test.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/history-test.js b/contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/history-test.js
index c43cfda..51d4112 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/history-test.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/history-test.js
@@ -40,10 +40,10 @@ test('date range is set correctly', function () {
 
   var history = Ember.ArrayProxy.create({ content: [
     Ember.Object.create({
-      dateSubmitted: min
+      dateSubmittedTimestamp: min
     }),
     Ember.Object.create({
-      dateSubmitted: max
+      dateSubmittedTimestamp: max
     })
   ]});
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/queries-test.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/queries-test.js b/contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/queries-test.js
index 0ed7dd7..d14f2cc 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/queries-test.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/queries-test.js
@@ -30,18 +30,3 @@ test('controller is initialized', function() {
 
   equal(component.get('columns.length'), 4, 'Columns are initialized correctly');
 });
-
-test('Should hide new queries', function() {
-  expect(1);
-
-  var queries = [
-    { isNew: true },
-    { isNew: false}
-  ];
-
-  var controller = this.subject({
-    queries: queries
-  });
-
-  equal(controller.get('model.length'), 1, 'Hide new queries from the list');
-});

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/settings-test.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/settings-test.js b/contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/settings-test.js
new file mode 100644
index 0000000..ef1b3d8
--- /dev/null
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/settings-test.js
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import { moduleFor, test } from 'ember-qunit';
+
+moduleFor('controller:settings', 'SettingsController', {
+  needs: [
+    'controller:databases',
+    'controller:index',
+    'controller:open-queries',
+    'controller:loaded-files',
+    'controller:index/history-query/results',
+    'controller:index/history-query/explain',
+    'controller:columns',
+    'controller:udfs',
+    'controller:index/history-query/logs'
+  ]
+});
+
+test('can add a setting', function() {
+  var controller = this.subject();
+
+  ok(!controller.get('currentSettings.settings.length'), 'No initial settings');
+
+  Ember.run(function() {
+    controller.send('add');
+  });
+
+  equal(controller.get('currentSettings.settings.length'), 1, 'Can add settings');
+});
+
+test('hasSettings return true if there are settings', function() {
+  var controller = this.subject();
+
+  ok(!controller.hasSettings(null), 'No settings => return false');
+
+  Ember.run(function() {
+    controller.send('add');
+  });
+
+  ok(controller.hasSettings(null), '1 setting => returns true');
+});
+
+test('setSettingForQuery', function() {
+  var controller = this.subject();
+
+  var settings = [ Ember.Object.create({key: 'key', value: 'value'}) ];
+
+  Ember.run(function() {
+    controller.setSettingForQuery(1, settings);
+  });
+
+  equal(controller.get('currentSettings.settings.firstObject.key'), settings.get('key'), 'It sets the settings for specified query');
+});
+
+test('validate', function() {
+  var predefinedSettings = [
+    {
+      name: 'some.key',
+      validate: new RegExp(/^\d+$/) // digits
+    }
+  ];
+
+  var controller = this.subject({
+    predefinedSettings: predefinedSettings
+  });
+
+  var settings = [
+    Ember.Object.create({key: { name: 'some.key' }, value: 'value'}),
+    Ember.Object.create({key: { name: 'some.key' }, value: '123'})
+  ];
+
+  Ember.run(function() {
+    controller.setSettingForQuery(1, settings);
+  });
+
+  var currentSettings = controller.get('model.firstObject.settings');
+  console.log(currentSettings);
+  ok(!currentSettings.get('firstObject.valid'), "First setting doesn't pass validation");
+  ok(currentSettings.get('lastObject.valid'), 'Second setting passes validation');
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/tez-ui-test.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/tez-ui-test.js b/contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/tez-ui-test.js
new file mode 100644
index 0000000..f2755c5
--- /dev/null
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/tez-ui-test.js
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import {
+  moduleFor,
+  test
+} from 'ember-qunit';
+
+moduleFor('controller:tez-ui', 'TezUiController', {
+  // Specify the other units that are required for this test.
+  // needs: ['controller:foo']
+});
+
+// Replace this with your real tests.
+test('it exists', function() {
+  var controller = this.subject();
+  ok(controller);
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/visual-explain-test.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/visual-explain-test.js b/contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/visual-explain-test.js
new file mode 100644
index 0000000..1ab84e2
--- /dev/null
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/visual-explain-test.js
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import {
+  moduleFor,
+  test
+} from 'ember-qunit';
+
+moduleFor('controller:visual-explain', 'VisualExplainController', {
+  // Specify the other units that are required for this test.
+  // needs: ['controller:foo']
+});
+
+// Replace this with your real tests.
+test('it exists', function() {
+  var controller = this.subject();
+  ok(controller);
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/views/visual-explain-test.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/views/visual-explain-test.js b/contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/views/visual-explain-test.js
new file mode 100644
index 0000000..97faea6
--- /dev/null
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/views/visual-explain-test.js
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import {
+  moduleFor,
+  test
+} from 'ember-qunit';
+
+moduleFor('view:visual-explain', 'VisualExplainView');
+
+// Replace this with your real tests.
+test('it exists', function() {
+  var view = this.subject();
+  ok(view);
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/main/resources/view.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/view.xml b/contrib/views/hive/src/main/resources/view.xml
index b936c6d..b1d9ce6 100644
--- a/contrib/views/hive/src/main/resources/view.xml
+++ b/contrib/views/hive/src/main/resources/view.xml
@@ -19,22 +19,30 @@
     <label>Hive</label>
     <version>0.0.1</version>
 
+    <min-ambari-version>1.7.*</min-ambari-version>
+
     <!-- HDFS Configs -->
     <parameter>
         <name>webhdfs.url</name>
-        <description>WebHDFS FileSystem URI (example: webhdfs://namenode:50070)</description>
+        <description>Enter the WebHDFS FileSystem URI. Typically this is the dfs.namenode.http-address property in the hdfs-site.xml configuration. URL must be accessible from Ambari Server.</description>
+        <label>WebHDFS FileSystem URI</label>
+        <placeholder>webhdfs://namenode:50070</placeholder>
         <required>true</required>
     </parameter>
 
     <parameter>
         <name>webhdfs.username</name>
-        <description>User and doAs for proxy user for HDFS</description>
+        <description>The doAs proxy user for HDFS. By default, uses the currently logged-in Ambari user.</description>
+        <label>WebHDFS Username</label>
         <required>false</required>
     </parameter>
 
     <parameter>
         <name>webhdfs.auth</name>
-        <description>Semicolon-separated authentication configs. Default: auth=SIMPLE</description>
+        <description>Semicolon-separated authentication configs.</description>
+        <label>WebHDFS Authentication</label>
+        <placeholder>auth=SIMPLE</placeholder>
+        <default-value>auth=SIMPLE</default-value>
         <required>false</required>
     </parameter>
 
@@ -42,40 +50,62 @@
 
     <parameter>
         <name>dataworker.username</name>
-        <description>The username (defaults to ViewContext username)</description>
+        <description>The dataworker username. By default, uses the currently logged-in Ambari user.</description>
+        <label>Dataworker Username</label>
         <required>false</required>
     </parameter>
 
     <parameter>
         <name>scripts.dir</name>
-        <description>HDFS directory path to store Hive scripts (example: /users/${username})</description>
+        <description>HDFS directory path to store Hive scripts.</description>
+        <label>Scripts HDFS Directory</label>
+        <placeholder>/user/${username}/hive/scripts</placeholder>
+        <default-value>/user/${username}/hive/scripts</default-value>
         <required>true</required>
     </parameter>
 
     <parameter>
         <name>jobs.dir</name>
-        <description>HDFS directory path to store Hive job status (example: /users/${username})</description>
+        <description>HDFS directory path to store Hive job status.</description>
+        <label>Jobs HDFS Directory</label>
+        <placeholder>/user/${username}/hive/jobs</placeholder>
+        <default-value>/user/${username}/hive/jobs</default-value>
         <required>true</required>
     </parameter>
 
     <parameter>
         <name>hive.host</name>
-        <description>HiveServer2 hostname or IP (example: 127.0.0.1)</description>
+        <description>Enter the HiveServer2 host. Host must be accessible from Ambari Server.</description>
+        <label>HiveServer2 Host</label>
+        <placeholder>127.0.0.1</placeholder>
         <required>true</required>
     </parameter>
 
     <parameter>
         <name>hive.port</name>
-        <description>HiveServer2 Thrift port (example: 10000)</description>
+        <description>HiveServer2 Thrift port (example: 10000).</description>
+        <label>HiveServer2 Thrift Port</label>
+        <placeholder>10000</placeholder>
+        <default-value>10000</default-value>
         <required>true</required>
     </parameter>
 
     <parameter>
         <name>hive.auth</name>
-        <description>Semicolon-separated authentication configs. Default: auth=NOSASL</description>
+        <description>Semicolon-separated authentication configs.</description>
+        <label>Hive Authentication</label>
+        <placeholder>auth=NONE</placeholder>
+        <default-value>auth=NONE</default-value>
         <required>false</required>
     </parameter>
 
+    <parameter>
+        <name>yarn.ats.url</name>
+        <description>The URL to the YARN Application Timeline Server, used to provide Jobs information. Typically this is the yarn.timeline-service.webapp.address property in the yarn-site.xml configuration.</description>
+        <placeholder>http://yarn.ats.address:8188</placeholder>
+        <required>true</required>
+    </parameter>
+
     <resource>
         <name>savedQuery</name>
         <plural-name>savedQueries</plural-name>
@@ -107,7 +137,7 @@
         <name>job</name>
         <plural-name>jobs</plural-name>
         <id-property>id</id-property>
-        <resource-class>org.apache.ambari.view.hive.resources.jobs.JobImpl</resource-class>
+        <resource-class>org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl</resource-class>
         <provider-class>org.apache.ambari.view.hive.resources.jobs.JobResourceProvider</provider-class>
         <service-class>org.apache.ambari.view.hive.resources.jobs.JobService</service-class>
     </resource>
@@ -133,7 +163,7 @@
             <id-property>id</id-property>
         </entity>
         <entity>
-            <class>org.apache.ambari.view.hive.resources.jobs.JobImpl</class>
+            <class>org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl</class>
             <id-property>id</id-property>
         </entity>
         <entity>

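The parameters declared above (webhdfs.url, hive.host, hive.port, yarn.ats.url, and so on) reach the view's server-side code through the ViewContext properties map. A minimal sketch of reading the new yarn.ats.url value, assuming the standard org.apache.ambari.view.ViewContext API; the AtsUrlResolver class below is illustrative only and not part of this change:

    import org.apache.ambari.view.ViewContext;

    public class AtsUrlResolver {
      private final ViewContext context;

      public AtsUrlResolver(ViewContext context) {
        this.context = context;
      }

      // Reads the ATS URL configured for this view instance,
      // e.g. "http://yarn.ats.address:8188" from the yarn.ats.url parameter.
      public String getAtsUrl() {
        return context.getProperties().get("yarn.ats.url");
      }
    }
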
http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/files/FileServiceTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/files/FileServiceTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/files/FileServiceTest.java
index 7ce2dd3..a5a0f48 100644
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/files/FileServiceTest.java
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/files/FileServiceTest.java
@@ -55,7 +55,12 @@ public class FileServiceTest extends HDFSTest {
   @AfterClass
   public static void shutDown() throws Exception {
     HDFSTest.shutDown(); // super
-    HdfsApi.dropAllConnections(); //cleanup API connection
+  }
+
+  @Override
+  @After
+  public void tearDown() throws Exception {
+    fileService.getSharedObjectsFactory().clear(HdfsApi.class);
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/ambari/blob/db973127/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/ATSParserTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/ATSParserTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/ATSParserTest.java
new file mode 100644
index 0000000..ced7772
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/ATSParserTest.java
@@ -0,0 +1,411 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs;
+
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.*;
+import org.apache.commons.codec.binary.Base64;
+import org.json.simple.JSONObject;
+import org.json.simple.JSONValue;
+import org.junit.Assert;
+import org.junit.Test;
+import sun.reflect.generics.reflectiveObjects.NotImplementedException;
+
+import java.util.Arrays;
+import java.util.List;
+
+public class ATSParserTest {
+  @Test
+  public void testBase64() throws Exception {
+    System.out.println(Arrays.toString(Base64.decodeBase64("HWvpjKiERZCy_le4s-odOQ")));
+  }
+
+  @Test
+  public void testGetHiveJobsList() throws Exception {
+    IATSParser jobLoader = new ATSParser(new ATSRequestsDelegateStub());
+
+    List<HiveQueryId> jobs = jobLoader.getHiveQuieryIdsList("hive");
+
+    Assert.assertEquals(1, jobs.size());
+
+    HiveQueryId job = jobs.get(0);
+    Assert.assertEquals("hive_20150209144848_c3a5a07b-c3b6-4f57-a6d5-3dadecdd6fd0", job.entity);
+    Assert.assertEquals(1423493324L, job.starttime);
+    Assert.assertEquals("hive", job.user);
+    Assert.assertEquals(1423493342L - 1423493324L, job.duration);
+    Assert.assertEquals("select count(*) from z", job.query);
+
+    Assert.assertEquals(1, job.dagNames.size());
+    Assert.assertEquals("hive_20150209144848_c3a5a07b-c3b6-4f57-a6d5-3dadecdd6fd0:4", job.dagNames.get(0));
+
+    Assert.assertEquals(2, job.stages.size());
+  }
+
+  @Test
+  public void testGetTezDAGByName() throws Exception {
+    IATSParser jobLoader = new ATSParser(new ATSRequestsDelegateStub());
+
+    TezDagId tezDag = jobLoader.getTezDAGByName("hive_20150209144848_c3a5a07b-c3b6-4f57-a6d5-3dadecdd6fd0:4");
+
+    Assert.assertEquals("application_1423156117563_0005", tezDag.applicationId);
+    Assert.assertEquals("SUCCEEDED", tezDag.status);
+  }
+
+  protected static class ATSRequestsDelegateStub implements ATSRequestsDelegate {
+
+    @Override
+    public JSONObject hiveQueryIdList(String username) {
+      return (JSONObject) JSONValue.parse(
+          "{ \"entities\" : [ { \"domain\" : \"DEFAULT\",\n" +
+              "        \"entity\" : \"hive_20150209144848_c3a5a07b-c3b6-4f57-a6d5-3dadecdd6fd0\",\n" +
+              "        \"entitytype\" : \"HIVE_QUERY_ID\",\n" +
+              "        \"events\" : [ { \"eventinfo\" : {  },\n" +
+              "              \"eventtype\" : \"QUERY_COMPLETED\",\n" +
+              "              \"timestamp\" : 1423493342843\n" +
+              "            },\n" +
+              "            { \"eventinfo\" : {  },\n" +
+              "              \"eventtype\" : \"QUERY_SUBMITTED\",\n" +
+              "              \"timestamp\" : 1423493324355\n" +
+              "            }\n" +
+              "          ],\n" +
+              "        \"otherinfo\" : { \"MAPRED\" : false,\n" +
+              "            \"QUERY\" : \"{\\\"queryText\\\":\\\"select count(*) from z\\\",\\\"queryPlan\\\":{\\\"STAGE PLANS\\\":{\\\"Stage-1\\\":{\\\"Tez\\\":{\\\"DagName:\\\":\\\"hive_20150209144848_c3a5a07b-c3b6-4f57-a6d5-3dadecdd6fd0:4\\\",\\\"Vertices:\\\":{\\\"Reducer 2\\\":{\\\"Reduce Operator Tree:\\\":{\\\"Group By Operator\\\":{\\\"mode:\\\":\\\"mergepartial\\\",\\\"aggregations:\\\":[\\\"count(VALUE._col0)\\\"],\\\"outputColumnNames:\\\":[\\\"_col0\\\"],\\\"children\\\":{\\\"Select Operator\\\":{\\\"expressions:\\\":\\\"_col0 (type: bigint)\\\",\\\"outputColumnNames:\\\":[\\\"_col0\\\"],\\\"children\\\":{\\\"File Output Operator\\\":{\\\"Statistics:\\\":\\\"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE\\\",\\\"compressed:\\\":\\\"false\\\",\\\"table:\\\":{\\\"serde:\\\":\\\"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe\\\",\\\"input format:\\\":\\\"org.apache.hadoop.mapred.TextInputFormat\\\",\\\"output format:\\\":\\\"org.apache.hadoop.hive.
 ql.io.HiveIgnoreKeyTextOutputFormat\\\"}}},\\\"Statistics:\\\":\\\"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE\\\"}},\\\"Statistics:\\\":\\\"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE\\\"}}},\\\"Map 1\\\":{\\\"Map Operator Tree:\\\":[{\\\"TableScan\\\":{\\\"alias:\\\":\\\"z\\\",\\\"children\\\":{\\\"Select Operator\\\":{\\\"children\\\":{\\\"Group By Operator\\\":{\\\"mode:\\\":\\\"hash\\\",\\\"aggregations:\\\":[\\\"count()\\\"],\\\"outputColumnNames:\\\":[\\\"_col0\\\"],\\\"children\\\":{\\\"Reduce Output Operator\\\":{\\\"sort order:\\\":\\\"\\\",\\\"value expressions:\\\":\\\"_col0 (type: bigint)\\\",\\\"Statistics:\\\":\\\"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE\\\"}},\\\"Statistics:\\\":\\\"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE\\\"}},\\\"Statistics:\\\":\\\"Num rows: 0 Data size: 40 Basic stats: PARTIAL Column stats: COMPLETE\\\"}},\\\"Statistics:\\\":\\\"Num rows
 : 0 Data size: 40 Basic stats: PARTIAL Column stats: COMPLETE\\\"}}]}},\\\"Edges:\\\":{\\\"Reducer 2\\\":{\\\"parent\\\":\\\"Map 1\\\",\\\"type\\\":\\\"SIMPLE_EDGE\\\"}}}},\\\"Stage-0\\\":{\\\"Fetch Operator\\\":{\\\"limit:\\\":\\\"-1\\\",\\\"Processor Tree:\\\":{\\\"ListSink\\\":{}}}}},\\\"STAGE DEPENDENCIES\\\":{\\\"Stage-1\\\":{\\\"ROOT STAGE\\\":\\\"TRUE\\\"},\\\"Stage-0\\\":{\\\"DEPENDENT STAGES\\\":\\\"Stage-1\\\"}}}}\",\n" +
+              "            \"STATUS\" : true,\n" +
+              "            \"TEZ\" : true\n" +
+              "          },\n" +
+              "        \"primaryfilters\" : { \"user\" : [ \"hive\" ] },\n" +
+              "        \"relatedentities\" : {  },\n" +
+              "        \"starttime\" : 1423493324355\n" +
+              "      } ] }"
+      );
+    }
+
+    @Override
+    public JSONObject hiveQueryIdByOperationId(String operationId) {
+      throw new NotImplementedException();
+    }
+
+    @Override
+    public JSONObject tezDagByName(String name) {
+      return (JSONObject) JSONValue.parse(
+          "{ \"entities\" : [ { \"domain\" : \"DEFAULT\",\n" +
+              "        \"entity\" : \"dag_1423156117563_0005_2\",\n" +
+              "        \"entitytype\" : \"TEZ_DAG_ID\",\n" +
+              "        \"events\" : [ { \"eventinfo\" : {  },\n" +
+              "              \"eventtype\" : \"DAG_FINISHED\",\n" +
+              "              \"timestamp\" : 1423493342484\n" +
+              "            },\n" +
+              "            { \"eventinfo\" : {  },\n" +
+              "              \"eventtype\" : \"DAG_STARTED\",\n" +
+              "              \"timestamp\" : 1423493325803\n" +
+              "            },\n" +
+              "            { \"eventinfo\" : {  },\n" +
+              "              \"eventtype\" : \"DAG_INITIALIZED\",\n" +
+              "              \"timestamp\" : 1423493325794\n" +
+              "            },\n" +
+              "            { \"eventinfo\" : {  },\n" +
+              "              \"eventtype\" : \"DAG_SUBMITTED\",\n" +
+              "              \"timestamp\" : 1423493325578\n" +
+              "            }\n" +
+              "          ],\n" +
+              "        \"otherinfo\" : { \"applicationId\" : \"application_1423156117563_0005\",\n" +
+              "            \"counters\" : { \"counterGroups\" : [ { \"counterGroupDisplayName\" : \"org.apache.tez.common.counters.DAGCounter\",\n" +
+              "                      \"counterGroupName\" : \"org.apache.tez.common.counters.DAGCounter\",\n" +
+              "                      \"counters\" : [ { \"counterDisplayName\" : \"NUM_SUCCEEDED_TASKS\",\n" +
+              "                            \"counterName\" : \"NUM_SUCCEEDED_TASKS\",\n" +
+              "                            \"counterValue\" : 2\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"TOTAL_LAUNCHED_TASKS\",\n" +
+              "                            \"counterName\" : \"TOTAL_LAUNCHED_TASKS\",\n" +
+              "                            \"counterValue\" : 2\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"DATA_LOCAL_TASKS\",\n" +
+              "                            \"counterName\" : \"DATA_LOCAL_TASKS\",\n" +
+              "                            \"counterValue\" : 1\n" +
+              "                          }\n" +
+              "                        ]\n" +
+              "                    },\n" +
+              "                    { \"counterGroupDisplayName\" : \"File System Counters\",\n" +
+              "                      \"counterGroupName\" : \"org.apache.tez.common.counters.FileSystemCounter\",\n" +
+              "                      \"counters\" : [ { \"counterDisplayName\" : \"FILE_BYTES_READ\",\n" +
+              "                            \"counterName\" : \"FILE_BYTES_READ\",\n" +
+              "                            \"counterValue\" : 57\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"FILE_BYTES_WRITTEN\",\n" +
+              "                            \"counterName\" : \"FILE_BYTES_WRITTEN\",\n" +
+              "                            \"counterValue\" : 82\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"FILE_READ_OPS\",\n" +
+              "                            \"counterName\" : \"FILE_READ_OPS\",\n" +
+              "                            \"counterValue\" : 0\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"FILE_LARGE_READ_OPS\",\n" +
+              "                            \"counterName\" : \"FILE_LARGE_READ_OPS\",\n" +
+              "                            \"counterValue\" : 0\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"FILE_WRITE_OPS\",\n" +
+              "                            \"counterName\" : \"FILE_WRITE_OPS\",\n" +
+              "                            \"counterValue\" : 0\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"HDFS_BYTES_READ\",\n" +
+              "                            \"counterName\" : \"HDFS_BYTES_READ\",\n" +
+              "                            \"counterValue\" : 287\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"HDFS_BYTES_WRITTEN\",\n" +
+              "                            \"counterName\" : \"HDFS_BYTES_WRITTEN\",\n" +
+              "                            \"counterValue\" : 2\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"HDFS_READ_OPS\",\n" +
+              "                            \"counterName\" : \"HDFS_READ_OPS\",\n" +
+              "                            \"counterValue\" : 16\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"HDFS_LARGE_READ_OPS\",\n" +
+              "                            \"counterName\" : \"HDFS_LARGE_READ_OPS\",\n" +
+              "                            \"counterValue\" : 0\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"HDFS_WRITE_OPS\",\n" +
+              "                            \"counterName\" : \"HDFS_WRITE_OPS\",\n" +
+              "                            \"counterValue\" : 2\n" +
+              "                          }\n" +
+              "                        ]\n" +
+              "                    },\n" +
+              "                    { \"counterGroupDisplayName\" : \"org.apache.tez.common.counters.TaskCounter\",\n" +
+              "                      \"counterGroupName\" : \"org.apache.tez.common.counters.TaskCounter\",\n" +
+              "                      \"counters\" : [ { \"counterDisplayName\" : \"REDUCE_INPUT_GROUPS\",\n" +
+              "                            \"counterName\" : \"REDUCE_INPUT_GROUPS\",\n" +
+              "                            \"counterValue\" : 0\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"REDUCE_INPUT_RECORDS\",\n" +
+              "                            \"counterName\" : \"REDUCE_INPUT_RECORDS\",\n" +
+              "                            \"counterValue\" : 1\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"COMBINE_INPUT_RECORDS\",\n" +
+              "                            \"counterName\" : \"COMBINE_INPUT_RECORDS\",\n" +
+              "                            \"counterValue\" : 0\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"SPILLED_RECORDS\",\n" +
+              "                            \"counterName\" : \"SPILLED_RECORDS\",\n" +
+              "                            \"counterValue\" : 2\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"NUM_SHUFFLED_INPUTS\",\n" +
+              "                            \"counterName\" : \"NUM_SHUFFLED_INPUTS\",\n" +
+              "                            \"counterValue\" : 1\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"NUM_SKIPPED_INPUTS\",\n" +
+              "                            \"counterName\" : \"NUM_SKIPPED_INPUTS\",\n" +
+              "                            \"counterValue\" : 0\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"NUM_FAILED_SHUFFLE_INPUTS\",\n" +
+              "                            \"counterName\" : \"NUM_FAILED_SHUFFLE_INPUTS\",\n" +
+              "                            \"counterValue\" : 0\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"MERGED_MAP_OUTPUTS\",\n" +
+              "                            \"counterName\" : \"MERGED_MAP_OUTPUTS\",\n" +
+              "                            \"counterValue\" : 1\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"GC_TIME_MILLIS\",\n" +
+              "                            \"counterName\" : \"GC_TIME_MILLIS\",\n" +
+              "                            \"counterValue\" : 389\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"CPU_MILLISECONDS\",\n" +
+              "                            \"counterName\" : \"CPU_MILLISECONDS\",\n" +
+              "                            \"counterValue\" : 2820\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"PHYSICAL_MEMORY_BYTES\",\n" +
+              "                            \"counterName\" : \"PHYSICAL_MEMORY_BYTES\",\n" +
+              "                            \"counterValue\" : 490799104\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"VIRTUAL_MEMORY_BYTES\",\n" +
+              "                            \"counterName\" : \"VIRTUAL_MEMORY_BYTES\",\n" +
+              "                            \"counterValue\" : 1558253568\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"COMMITTED_HEAP_BYTES\",\n" +
+              "                            \"counterName\" : \"COMMITTED_HEAP_BYTES\",\n" +
+              "                            \"counterValue\" : 312475648\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"INPUT_RECORDS_PROCESSED\",\n" +
+              "                            \"counterName\" : \"INPUT_RECORDS_PROCESSED\",\n" +
+              "                            \"counterValue\" : 3\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"OUTPUT_RECORDS\",\n" +
+              "                            \"counterName\" : \"OUTPUT_RECORDS\",\n" +
+              "                            \"counterValue\" : 1\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"OUTPUT_BYTES\",\n" +
+              "                            \"counterName\" : \"OUTPUT_BYTES\",\n" +
+              "                            \"counterValue\" : 3\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"OUTPUT_BYTES_WITH_OVERHEAD\",\n" +
+              "                            \"counterName\" : \"OUTPUT_BYTES_WITH_OVERHEAD\",\n" +
+              "                            \"counterValue\" : 11\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"OUTPUT_BYTES_PHYSICAL\",\n" +
+              "                            \"counterName\" : \"OUTPUT_BYTES_PHYSICAL\",\n" +
+              "                            \"counterValue\" : 25\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"ADDITIONAL_SPILLS_BYTES_WRITTEN\",\n" +
+              "                            \"counterName\" : \"ADDITIONAL_SPILLS_BYTES_WRITTEN\",\n" +
+              "                            \"counterValue\" : 25\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"ADDITIONAL_SPILLS_BYTES_READ\",\n" +
+              "                            \"counterName\" : \"ADDITIONAL_SPILLS_BYTES_READ\",\n" +
+              "                            \"counterValue\" : 25\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"ADDITIONAL_SPILL_COUNT\",\n" +
+              "                            \"counterName\" : \"ADDITIONAL_SPILL_COUNT\",\n" +
+              "                            \"counterValue\" : 0\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"SHUFFLE_BYTES\",\n" +
+              "                            \"counterName\" : \"SHUFFLE_BYTES\",\n" +
+              "                            \"counterValue\" : 25\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"SHUFFLE_BYTES_DECOMPRESSED\",\n" +
+              "                            \"counterName\" : \"SHUFFLE_BYTES_DECOMPRESSED\",\n" +
+              "                            \"counterValue\" : 11\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"SHUFFLE_BYTES_TO_MEM\",\n" +
+              "                            \"counterName\" : \"SHUFFLE_BYTES_TO_MEM\",\n" +
+              "                            \"counterValue\" : 25\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"SHUFFLE_BYTES_TO_DISK\",\n" +
+              "                            \"counterName\" : \"SHUFFLE_BYTES_TO_DISK\",\n" +
+              "                            \"counterValue\" : 0\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"SHUFFLE_BYTES_DISK_DIRECT\",\n" +
+              "                            \"counterName\" : \"SHUFFLE_BYTES_DISK_DIRECT\",\n" +
+              "                            \"counterValue\" : 0\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"NUM_MEM_TO_DISK_MERGES\",\n" +
+              "                            \"counterName\" : \"NUM_MEM_TO_DISK_MERGES\",\n" +
+              "                            \"counterValue\" : 0\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"NUM_DISK_TO_DISK_MERGES\",\n" +
+              "                            \"counterName\" : \"NUM_DISK_TO_DISK_MERGES\",\n" +
+              "                            \"counterValue\" : 0\n" +
+              "                          }\n" +
+              "                        ]\n" +
+              "                    },\n" +
+              "                    { \"counterGroupDisplayName\" : \"HIVE\",\n" +
+              "                      \"counterGroupName\" : \"HIVE\",\n" +
+              "                      \"counters\" : [ { \"counterDisplayName\" : \"CREATED_FILES\",\n" +
+              "                            \"counterName\" : \"CREATED_FILES\",\n" +
+              "                            \"counterValue\" : 1\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"DESERIALIZE_ERRORS\",\n" +
+              "                            \"counterName\" : \"DESERIALIZE_ERRORS\",\n" +
+              "                            \"counterValue\" : 0\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"RECORDS_IN_Map_1\",\n" +
+              "                            \"counterName\" : \"RECORDS_IN_Map_1\",\n" +
+              "                            \"counterValue\" : 3\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"RECORDS_OUT_INTERMEDIATE_Map_1\",\n" +
+              "                            \"counterName\" : \"RECORDS_OUT_INTERMEDIATE_Map_1\",\n" +
+              "                            \"counterValue\" : 1\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"RECORDS_OUT_Reducer_2\",\n" +
+              "                            \"counterName\" : \"RECORDS_OUT_Reducer_2\",\n" +
+              "                            \"counterValue\" : 1\n" +
+              "                          }\n" +
+              "                        ]\n" +
+              "                    },\n" +
+              "                    { \"counterGroupDisplayName\" : \"Shuffle Errors\",\n" +
+              "                      \"counterGroupName\" : \"Shuffle Errors\",\n" +
+              "                      \"counters\" : [ { \"counterDisplayName\" : \"BAD_ID\",\n" +
+              "                            \"counterName\" : \"BAD_ID\",\n" +
+              "                            \"counterValue\" : 0\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"CONNECTION\",\n" +
+              "                            \"counterName\" : \"CONNECTION\",\n" +
+              "                            \"counterValue\" : 0\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"IO_ERROR\",\n" +
+              "                            \"counterName\" : \"IO_ERROR\",\n" +
+              "                            \"counterValue\" : 0\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"WRONG_LENGTH\",\n" +
+              "                            \"counterName\" : \"WRONG_LENGTH\",\n" +
+              "                            \"counterValue\" : 0\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"WRONG_MAP\",\n" +
+              "                            \"counterName\" : \"WRONG_MAP\",\n" +
+              "                            \"counterValue\" : 0\n" +
+              "                          },\n" +
+              "                          { \"counterDisplayName\" : \"WRONG_REDUCE\",\n" +
+              "                            \"counterName\" : \"WRONG_REDUCE\",\n" +
+              "                            \"counterValue\" : 0\n" +
+              "                          }\n" +
+              "                        ]\n" +
+              "                    }\n" +
+              "                  ] },\n" +
+              "            \"dagPlan\" : { \"dagName\" : \"hive_20150209144848_c3a5a07b-c3b6-4f57-a6d5-3dadecdd6fd0:4\",\n" +
+              "                \"edges\" : [ { \"dataMovementType\" : \"SCATTER_GATHER\",\n" +
+              "                      \"dataSourceType\" : \"PERSISTED\",\n" +
+              "                      \"edgeDestinationClass\" : \"org.apache.tez.runtime.library.input.OrderedGroupedKVInput\",\n" +
+              "                      \"edgeId\" : \"533454263\",\n" +
+              "                      \"edgeSourceClass\" : \"org.apache.tez.runtime.library.output.OrderedPartitionedKVOutput\",\n" +
+              "                      \"inputVertexName\" : \"Map 1\",\n" +
+              "                      \"outputVertexName\" : \"Reducer 2\",\n" +
+              "                      \"schedulingType\" : \"SEQUENTIAL\"\n" +
+              "                    } ],\n" +
+              "                \"version\" : 1,\n" +
+              "                \"vertices\" : [ { \"additionalInputs\" : [ { \"class\" : \"org.apache.tez.mapreduce.input.MRInputLegacy\",\n" +
+              "                            \"initializer\" : \"org.apache.hadoop.hive.ql.exec.tez.HiveSplitGenerator\",\n" +
+              "                            \"name\" : \"z\"\n" +
+              "                          } ],\n" +
+              "                      \"outEdgeIds\" : [ \"533454263\" ],\n" +
+              "                      \"processorClass\" : \"org.apache.hadoop.hive.ql.exec.tez.MapTezProcessor\",\n" +
+              "                      \"vertexName\" : \"Map 1\"\n" +
+              "                    },\n" +
+              "                    { \"additionalOutputs\" : [ { \"class\" : \"org.apache.tez.mapreduce.output.MROutput\",\n" +
+              "                            \"name\" : \"out_Reducer 2\"\n" +
+              "                          } ],\n" +
+              "                      \"inEdgeIds\" : [ \"533454263\" ],\n" +
+              "                      \"processorClass\" : \"org.apache.hadoop.hive.ql.exec.tez.ReduceTezProcessor\",\n" +
+              "                      \"vertexName\" : \"Reducer 2\"\n" +
+              "                    }\n" +
+              "                  ]\n" +
+              "              },\n" +
+              "            \"diagnostics\" : \"\",\n" +
+              "            \"endTime\" : 1423493342484,\n" +
+              "            \"initTime\" : 1423493325794,\n" +
+              "            \"numCompletedTasks\" : 2,\n" +
+              "            \"numFailedTaskAttempts\" : 0,\n" +
+              "            \"numFailedTasks\" : 0,\n" +
+              "            \"numKilledTaskAttempts\" : 0,\n" +
+              "            \"numKilledTasks\" : 0,\n" +
+              "            \"numSucceededTasks\" : 2,\n" +
+              "            \"startTime\" : 1423493325803,\n" +
+              "            \"status\" : \"SUCCEEDED\",\n" +
+              "            \"timeTaken\" : 16681,\n" +
+              "            \"vertexNameIdMapping\" : { \"Map 1\" : \"vertex_1423156117563_0005_2_00\",\n" +
+              "                \"Reducer 2\" : \"vertex_1423156117563_0005_2_01\"\n" +
+              "              }\n" +
+              "          },\n" +
+              "        \"primaryfilters\" : { \"applicationId\" : [ \"application_1423156117563_0005\" ],\n" +
+              "            \"dagName\" : [ \"hive_20150209144848_c3a5a07b-c3b6-4f57-a6d5-3dadecdd6fd0:4\" ],\n" +
+              "            \"status\" : [ \"SUCCEEDED\" ],\n" +
+              "            \"user\" : [ \"hive\" ]\n" +
+              "          },\n" +
+              "        \"relatedentities\" : {  },\n" +
+              "        \"starttime\" : 1423493325578\n" +
+              "      } ] }"
+      );
+    }
+  }
+}
\ No newline at end of file
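
The JSON returned by ATSRequestsDelegateStub above mirrors the YARN Application Timeline Server v1 REST responses that a real delegate would fetch over HTTP. A rough sketch of such a request, assuming the /ws/v1/timeline/HIVE_QUERY_ID?primaryFilter=user:<name> endpoint; the AtsRestSketch class and its URL handling are illustrative only, not part of this change:

    import org.json.simple.JSONObject;
    import org.json.simple.JSONValue;

    import java.io.InputStreamReader;
    import java.io.Reader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class AtsRestSketch {
      // Fetches HIVE_QUERY_ID entities for a user from the Timeline Server,
      // e.g. http://yarn.ats.address:8188/ws/v1/timeline/HIVE_QUERY_ID?primaryFilter=user:hive
      public static JSONObject fetchHiveQueryIds(String atsUrl, String username) throws Exception {
        URL url = new URL(atsUrl + "/ws/v1/timeline/HIVE_QUERY_ID?primaryFilter=user:" + username);
        HttpURLConnection connection = (HttpURLConnection) url.openConnection();
        connection.setRequestProperty("Accept", "application/json");
        try (Reader reader = new InputStreamReader(connection.getInputStream(), "UTF-8")) {
          return (JSONObject) JSONValue.parse(reader);
        } finally {
          connection.disconnect();
        }
      }
    }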