Posted to commits@ambari.apache.org by ni...@apache.org on 2016/12/28 09:33:03 UTC

[03/23] ambari git commit: AMBARI-19302 : removed contrib/views/hive folder and made necessary changes in pom.xml files (nitirajrathore)

http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/ATSParserTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/ATSParserTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/ATSParserTest.java
deleted file mode 100644
index 43b0b65..0000000
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/ATSParserTest.java
+++ /dev/null
@@ -1,512 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.resources.jobs;
-
-import org.apache.ambari.view.hive.resources.jobs.atsJobs.ATSParser;
-import org.apache.ambari.view.hive.resources.jobs.atsJobs.ATSRequestsDelegate;
-import org.apache.ambari.view.hive.resources.jobs.atsJobs.HiveQueryId;
-import org.apache.ambari.view.hive.resources.jobs.atsJobs.IATSParser;
-import org.apache.ambari.view.hive.resources.jobs.atsJobs.TezDagId;
-import org.apache.commons.codec.binary.Base64;
-import org.json.simple.JSONObject;
-import org.json.simple.JSONValue;
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.util.Arrays;
-import java.util.List;
-
-public class ATSParserTest {
-  @Test
-  public void testBase64() throws Exception {
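-    // prints the decoded bytes of a URL-safe Base64 operation ID; no assertions, manual inspection only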
-    System.out.println(Arrays.toString(Base64.decodeBase64("HWvpjKiERZCy_le4s-odOQ")));
-  }
-
-  @Test
-  public void testGetHiveJobsList() throws Exception {
-    IATSParser jobLoader = new ATSParser(new ATSRequestsDelegateStub());
-
-    List<HiveQueryId> jobs = jobLoader.getHiveQueryIdsForUser("hive");
-
-    Assert.assertEquals(1, jobs.size());
-
-    HiveQueryId job = jobs.get(0);
-    Assert.assertEquals("hive_20150209144848_c3a5a07b-c3b6-4f57-a6d5-3dadecdd6fd0", job.entity);
-    Assert.assertEquals(1423493324355L, job.starttime);
-    Assert.assertEquals("hive", job.user);
-    Assert.assertEquals((1423493342843L - 1423493324355L) / 1000L, job.duration);
-    Assert.assertEquals("select count(*) from z", job.query);
-
-    Assert.assertEquals(1, job.dagNames.size());
-    Assert.assertEquals("hive_20150209144848_c3a5a07b-c3b6-4f57-a6d5-3dadecdd6fd0:4", job.dagNames.get(0));
-
-    Assert.assertEquals(2, job.stages.size());
-    Assert.assertTrue(HiveQueryId.ATS_15_RESPONSE_VERSION > job.version);
-
-    jobLoader = new ATSParser(new ATSV15RequestsDelegateStub());
-    List<HiveQueryId> jobsv2 = jobLoader.getHiveQueryIdsForUser("hive");
-    Assert.assertEquals(1, jobsv2.size());
-    HiveQueryId jobv2 = jobsv2.get(0);
-    Assert.assertTrue(HiveQueryId.ATS_15_RESPONSE_VERSION <= jobv2.version);
-  }
-
-  @Test
-  public void testGetTezDAGByName() throws Exception {
-    IATSParser jobLoader = new ATSParser(new ATSRequestsDelegateStub());
-
-    TezDagId tezDag = jobLoader.getTezDAGByName("hive_20150209144848_c3a5a07b-c3b6-4f57-a6d5-3dadecdd6fd0:4");
-
-    Assert.assertEquals("dag_1423156117563_0005_2", tezDag.entity);
-    Assert.assertEquals("application_1423156117563_0005", tezDag.applicationId);
-    Assert.assertEquals("SUCCEEDED", tezDag.status);
-  }
-
-  @Test
-  public void testGetTezDagByEntity() throws Exception {
-    IATSParser jobLoader = new ATSParser(new ATSV15RequestsDelegateStub());
-
-    TezDagId tezDag = jobLoader.getTezDAGByEntity("hive_20150209144848_c3a5a07b-c3b6-4f57-a6d5-3dadecdd6fd0:4");
-
-    Assert.assertEquals("dag_1423156117563_0005_2", tezDag.entity);
-    Assert.assertEquals("application_1423156117563_0005", tezDag.applicationId);
-    Assert.assertEquals("SUCCEEDED", tezDag.status);
-  }
-
-  protected static class ATSV15RequestsDelegateStub extends ATSRequestsDelegateStub {
-    /**
-     * Returns a canned response containing the version field that ATS v1.5 adds.
-     */
-    @Override
-    public JSONObject hiveQueryIdsForUser(String username) {
-      return (JSONObject) JSONValue.parse(
-        "{ \"entities\" : [ { \"domain\" : \"DEFAULT\",\n" +
-          "        \"entity\" : \"hive_20150209144848_c3a5a07b-c3b6-4f57-a6d5-3dadecdd6fd0\",\n" +
-          "        \"entitytype\" : \"HIVE_QUERY_ID\",\n" +
-          "        \"events\" : [ { \"eventinfo\" : {  },\n" +
-          "              \"eventtype\" : \"QUERY_COMPLETED\",\n" +
-          "              \"timestamp\" : 1423493342843\n" +
-          "            },\n" +
-          "            { \"eventinfo\" : {  },\n" +
-          "              \"eventtype\" : \"QUERY_SUBMITTED\",\n" +
-          "              \"timestamp\" : 1423493324355\n" +
-          "            }\n" +
-          "          ],\n" +
-          "        \"otherinfo\" : { \"MAPRED\" : false,\n" +
-          "            \"QUERY\" : \"{\\\"queryText\\\":\\\"select count(*) from z\\\",\\\"queryPlan\\\":{\\\"STAGE PLANS\\\":{\\\"Stage-1\\\":{\\\"Tez\\\":{\\\"DagName:\\\":\\\"hive_20150209144848_c3a5a07b-c3b6-4f57-a6d5-3dadecdd6fd0:4\\\",\\\"Vertices:\\\":{\\\"Reducer 2\\\":{\\\"Reduce Operator Tree:\\\":{\\\"Group By Operator\\\":{\\\"mode:\\\":\\\"mergepartial\\\",\\\"aggregations:\\\":[\\\"count(VALUE._col0)\\\"],\\\"outputColumnNames:\\\":[\\\"_col0\\\"],\\\"children\\\":{\\\"Select Operator\\\":{\\\"expressions:\\\":\\\"_col0 (type: bigint)\\\",\\\"outputColumnNames:\\\":[\\\"_col0\\\"],\\\"children\\\":{\\\"File Output Operator\\\":{\\\"Statistics:\\\":\\\"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE\\\",\\\"compressed:\\\":\\\"false\\\",\\\"table:\\\":{\\\"serde:\\\":\\\"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe\\\",\\\"input format:\\\":\\\"org.apache.hadoop.mapred.TextInputFormat\\\",\\\"output format:\\\":\\\"org.apache.hadoop.hive.ql.i
 o.HiveIgnoreKeyTextOutputFormat\\\"}}},\\\"Statistics:\\\":\\\"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE\\\"}},\\\"Statistics:\\\":\\\"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE\\\"}}},\\\"Map 1\\\":{\\\"Map Operator Tree:\\\":[{\\\"TableScan\\\":{\\\"alias:\\\":\\\"z\\\",\\\"children\\\":{\\\"Select Operator\\\":{\\\"children\\\":{\\\"Group By Operator\\\":{\\\"mode:\\\":\\\"hash\\\",\\\"aggregations:\\\":[\\\"count()\\\"],\\\"outputColumnNames:\\\":[\\\"_col0\\\"],\\\"children\\\":{\\\"Reduce Output Operator\\\":{\\\"sort order:\\\":\\\"\\\",\\\"value expressions:\\\":\\\"_col0 (type: bigint)\\\",\\\"Statistics:\\\":\\\"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE\\\"}},\\\"Statistics:\\\":\\\"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE\\\"}},\\\"Statistics:\\\":\\\"Num rows: 0 Data size: 40 Basic stats: PARTIAL Column stats: COMPLETE\\\"}},\\\"Statistics:\\\":\\\"Num rows: 0 
 Data size: 40 Basic stats: PARTIAL Column stats: COMPLETE\\\"}}]}},\\\"Edges:\\\":{\\\"Reducer 2\\\":{\\\"parent\\\":\\\"Map 1\\\",\\\"type\\\":\\\"SIMPLE_EDGE\\\"}}}},\\\"Stage-0\\\":{\\\"Fetch Operator\\\":{\\\"limit:\\\":\\\"-1\\\",\\\"Processor Tree:\\\":{\\\"ListSink\\\":{}}}}},\\\"STAGE DEPENDENCIES\\\":{\\\"Stage-1\\\":{\\\"ROOT STAGE\\\":\\\"TRUE\\\"},\\\"Stage-0\\\":{\\\"DEPENDENT STAGES\\\":\\\"Stage-1\\\"}}}}\",\n" +
-          "            \"STATUS\" : true,\n" +
-          "            \"TEZ\" : true\n" +
-          "            \"VERSION\" : 2\n" +
-          "          },\n" +
-          "        \"primaryfilters\" : { \"user\" : [ \"hive\" ] },\n" +
-          "        \"relatedentities\" : {  },\n" +
-          "        \"starttime\" : 1423493324355\n" +
-          "      } ] }"
-      );
-    }
-  }
-
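-  /**
-   * Stub that serves canned ATS v1 responses for a single Hive query
-   * ("select count(*) from z") and its Tez DAG.
-   */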
-  protected static class ATSRequestsDelegateStub implements ATSRequestsDelegate {
-
-    @Override
-    public JSONObject hiveQueryIdsForUserByTime(String username, long startTime, long endTime) {
-      throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public JSONObject hiveQueryEntityByEntityId(String hiveEntityId) {
-      return null;
-    }
-
-    @Override
-    public String hiveQueryIdDirectUrl(String entity) {
-      return null;
-    }
-
-    @Override
-    public String hiveQueryIdOperationIdUrl(String operationId) {
-      return null;
-    }
-
-    @Override
-    public String tezDagDirectUrl(String entity) {
-      return null;
-    }
-
-    @Override
-    public String tezDagNameUrl(String name) {
-      return null;
-    }
-
-    @Override
-    public String tezVerticesListForDAGUrl(String dagId) {
-      return null;
-    }
-
-    @Override
-    public JSONObject hiveQueryIdsForUser(String username) {
-      return (JSONObject) JSONValue.parse(
-          "{ \"entities\" : [ { \"domain\" : \"DEFAULT\",\n" +
-              "        \"entity\" : \"hive_20150209144848_c3a5a07b-c3b6-4f57-a6d5-3dadecdd6fd0\",\n" +
-              "        \"entitytype\" : \"HIVE_QUERY_ID\",\n" +
-              "        \"events\" : [ { \"eventinfo\" : {  },\n" +
-              "              \"eventtype\" : \"QUERY_COMPLETED\",\n" +
-              "              \"timestamp\" : 1423493342843\n" +
-              "            },\n" +
-              "            { \"eventinfo\" : {  },\n" +
-              "              \"eventtype\" : \"QUERY_SUBMITTED\",\n" +
-              "              \"timestamp\" : 1423493324355\n" +
-              "            }\n" +
-              "          ],\n" +
-              "        \"otherinfo\" : { \"MAPRED\" : false,\n" +
-              "            \"QUERY\" : \"{\\\"queryText\\\":\\\"select count(*) from z\\\",\\\"queryPlan\\\":{\\\"STAGE PLANS\\\":{\\\"Stage-1\\\":{\\\"Tez\\\":{\\\"DagName:\\\":\\\"hive_20150209144848_c3a5a07b-c3b6-4f57-a6d5-3dadecdd6fd0:4\\\",\\\"Vertices:\\\":{\\\"Reducer 2\\\":{\\\"Reduce Operator Tree:\\\":{\\\"Group By Operator\\\":{\\\"mode:\\\":\\\"mergepartial\\\",\\\"aggregations:\\\":[\\\"count(VALUE._col0)\\\"],\\\"outputColumnNames:\\\":[\\\"_col0\\\"],\\\"children\\\":{\\\"Select Operator\\\":{\\\"expressions:\\\":\\\"_col0 (type: bigint)\\\",\\\"outputColumnNames:\\\":[\\\"_col0\\\"],\\\"children\\\":{\\\"File Output Operator\\\":{\\\"Statistics:\\\":\\\"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE\\\",\\\"compressed:\\\":\\\"false\\\",\\\"table:\\\":{\\\"serde:\\\":\\\"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe\\\",\\\"input format:\\\":\\\"org.apache.hadoop.mapred.TextInputFormat\\\",\\\"output format:\\\":\\\"org.apache.hadoop.hive.
 ql.io.HiveIgnoreKeyTextOutputFormat\\\"}}},\\\"Statistics:\\\":\\\"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE\\\"}},\\\"Statistics:\\\":\\\"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE\\\"}}},\\\"Map 1\\\":{\\\"Map Operator Tree:\\\":[{\\\"TableScan\\\":{\\\"alias:\\\":\\\"z\\\",\\\"children\\\":{\\\"Select Operator\\\":{\\\"children\\\":{\\\"Group By Operator\\\":{\\\"mode:\\\":\\\"hash\\\",\\\"aggregations:\\\":[\\\"count()\\\"],\\\"outputColumnNames:\\\":[\\\"_col0\\\"],\\\"children\\\":{\\\"Reduce Output Operator\\\":{\\\"sort order:\\\":\\\"\\\",\\\"value expressions:\\\":\\\"_col0 (type: bigint)\\\",\\\"Statistics:\\\":\\\"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE\\\"}},\\\"Statistics:\\\":\\\"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE\\\"}},\\\"Statistics:\\\":\\\"Num rows: 0 Data size: 40 Basic stats: PARTIAL Column stats: COMPLETE\\\"}},\\\"Statistics:\\\":\\\"Num rows
 : 0 Data size: 40 Basic stats: PARTIAL Column stats: COMPLETE\\\"}}]}},\\\"Edges:\\\":{\\\"Reducer 2\\\":{\\\"parent\\\":\\\"Map 1\\\",\\\"type\\\":\\\"SIMPLE_EDGE\\\"}}}},\\\"Stage-0\\\":{\\\"Fetch Operator\\\":{\\\"limit:\\\":\\\"-1\\\",\\\"Processor Tree:\\\":{\\\"ListSink\\\":{}}}}},\\\"STAGE DEPENDENCIES\\\":{\\\"Stage-1\\\":{\\\"ROOT STAGE\\\":\\\"TRUE\\\"},\\\"Stage-0\\\":{\\\"DEPENDENT STAGES\\\":\\\"Stage-1\\\"}}}}\",\n" +
-              "            \"STATUS\" : true,\n" +
-              "            \"TEZ\" : true\n" +
-              "          },\n" +
-              "        \"primaryfilters\" : { \"user\" : [ \"hive\" ] },\n" +
-              "        \"relatedentities\" : {  },\n" +
-              "        \"starttime\" : 1423493324355\n" +
-              "      } ] }"
-      );
-    }
-
-    @Override
-    public JSONObject hiveQueryIdByOperationId(String operationId) {
-      throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public JSONObject tezDagByName(String name) {
-      return (JSONObject) JSONValue.parse(
-          "{ \"entities\" : [ { \"domain\" : \"DEFAULT\",\n" +
-              "        \"entity\" : \"dag_1423156117563_0005_2\",\n" +
-              "        \"entitytype\" : \"TEZ_DAG_ID\",\n" +
-              "        \"events\" : [ { \"eventinfo\" : {  },\n" +
-              "              \"eventtype\" : \"DAG_FINISHED\",\n" +
-              "              \"timestamp\" : 1423493342484\n" +
-              "            },\n" +
-              "            { \"eventinfo\" : {  },\n" +
-              "              \"eventtype\" : \"DAG_STARTED\",\n" +
-              "              \"timestamp\" : 1423493325803\n" +
-              "            },\n" +
-              "            { \"eventinfo\" : {  },\n" +
-              "              \"eventtype\" : \"DAG_INITIALIZED\",\n" +
-              "              \"timestamp\" : 1423493325794\n" +
-              "            },\n" +
-              "            { \"eventinfo\" : {  },\n" +
-              "              \"eventtype\" : \"DAG_SUBMITTED\",\n" +
-              "              \"timestamp\" : 1423493325578\n" +
-              "            }\n" +
-              "          ],\n" +
-              "        \"otherinfo\" : { \"applicationId\" : \"application_1423156117563_0005\",\n" +
-              "            \"counters\" : { \"counterGroups\" : [ { \"counterGroupDisplayName\" : \"org.apache.tez.common.counters.DAGCounter\",\n" +
-              "                      \"counterGroupName\" : \"org.apache.tez.common.counters.DAGCounter\",\n" +
-              "                      \"counters\" : [ { \"counterDisplayName\" : \"NUM_SUCCEEDED_TASKS\",\n" +
-              "                            \"counterName\" : \"NUM_SUCCEEDED_TASKS\",\n" +
-              "                            \"counterValue\" : 2\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"TOTAL_LAUNCHED_TASKS\",\n" +
-              "                            \"counterName\" : \"TOTAL_LAUNCHED_TASKS\",\n" +
-              "                            \"counterValue\" : 2\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"DATA_LOCAL_TASKS\",\n" +
-              "                            \"counterName\" : \"DATA_LOCAL_TASKS\",\n" +
-              "                            \"counterValue\" : 1\n" +
-              "                          }\n" +
-              "                        ]\n" +
-              "                    },\n" +
-              "                    { \"counterGroupDisplayName\" : \"File System Counters\",\n" +
-              "                      \"counterGroupName\" : \"org.apache.tez.common.counters.FileSystemCounter\",\n" +
-              "                      \"counters\" : [ { \"counterDisplayName\" : \"FILE_BYTES_READ\",\n" +
-              "                            \"counterName\" : \"FILE_BYTES_READ\",\n" +
-              "                            \"counterValue\" : 57\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"FILE_BYTES_WRITTEN\",\n" +
-              "                            \"counterName\" : \"FILE_BYTES_WRITTEN\",\n" +
-              "                            \"counterValue\" : 82\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"FILE_READ_OPS\",\n" +
-              "                            \"counterName\" : \"FILE_READ_OPS\",\n" +
-              "                            \"counterValue\" : 0\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"FILE_LARGE_READ_OPS\",\n" +
-              "                            \"counterName\" : \"FILE_LARGE_READ_OPS\",\n" +
-              "                            \"counterValue\" : 0\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"FILE_WRITE_OPS\",\n" +
-              "                            \"counterName\" : \"FILE_WRITE_OPS\",\n" +
-              "                            \"counterValue\" : 0\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"HDFS_BYTES_READ\",\n" +
-              "                            \"counterName\" : \"HDFS_BYTES_READ\",\n" +
-              "                            \"counterValue\" : 287\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"HDFS_BYTES_WRITTEN\",\n" +
-              "                            \"counterName\" : \"HDFS_BYTES_WRITTEN\",\n" +
-              "                            \"counterValue\" : 2\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"HDFS_READ_OPS\",\n" +
-              "                            \"counterName\" : \"HDFS_READ_OPS\",\n" +
-              "                            \"counterValue\" : 16\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"HDFS_LARGE_READ_OPS\",\n" +
-              "                            \"counterName\" : \"HDFS_LARGE_READ_OPS\",\n" +
-              "                            \"counterValue\" : 0\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"HDFS_WRITE_OPS\",\n" +
-              "                            \"counterName\" : \"HDFS_WRITE_OPS\",\n" +
-              "                            \"counterValue\" : 2\n" +
-              "                          }\n" +
-              "                        ]\n" +
-              "                    },\n" +
-              "                    { \"counterGroupDisplayName\" : \"org.apache.tez.common.counters.TaskCounter\",\n" +
-              "                      \"counterGroupName\" : \"org.apache.tez.common.counters.TaskCounter\",\n" +
-              "                      \"counters\" : [ { \"counterDisplayName\" : \"REDUCE_INPUT_GROUPS\",\n" +
-              "                            \"counterName\" : \"REDUCE_INPUT_GROUPS\",\n" +
-              "                            \"counterValue\" : 0\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"REDUCE_INPUT_RECORDS\",\n" +
-              "                            \"counterName\" : \"REDUCE_INPUT_RECORDS\",\n" +
-              "                            \"counterValue\" : 1\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"COMBINE_INPUT_RECORDS\",\n" +
-              "                            \"counterName\" : \"COMBINE_INPUT_RECORDS\",\n" +
-              "                            \"counterValue\" : 0\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"SPILLED_RECORDS\",\n" +
-              "                            \"counterName\" : \"SPILLED_RECORDS\",\n" +
-              "                            \"counterValue\" : 2\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"NUM_SHUFFLED_INPUTS\",\n" +
-              "                            \"counterName\" : \"NUM_SHUFFLED_INPUTS\",\n" +
-              "                            \"counterValue\" : 1\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"NUM_SKIPPED_INPUTS\",\n" +
-              "                            \"counterName\" : \"NUM_SKIPPED_INPUTS\",\n" +
-              "                            \"counterValue\" : 0\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"NUM_FAILED_SHUFFLE_INPUTS\",\n" +
-              "                            \"counterName\" : \"NUM_FAILED_SHUFFLE_INPUTS\",\n" +
-              "                            \"counterValue\" : 0\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"MERGED_MAP_OUTPUTS\",\n" +
-              "                            \"counterName\" : \"MERGED_MAP_OUTPUTS\",\n" +
-              "                            \"counterValue\" : 1\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"GC_TIME_MILLIS\",\n" +
-              "                            \"counterName\" : \"GC_TIME_MILLIS\",\n" +
-              "                            \"counterValue\" : 389\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"CPU_MILLISECONDS\",\n" +
-              "                            \"counterName\" : \"CPU_MILLISECONDS\",\n" +
-              "                            \"counterValue\" : 2820\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"PHYSICAL_MEMORY_BYTES\",\n" +
-              "                            \"counterName\" : \"PHYSICAL_MEMORY_BYTES\",\n" +
-              "                            \"counterValue\" : 490799104\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"VIRTUAL_MEMORY_BYTES\",\n" +
-              "                            \"counterName\" : \"VIRTUAL_MEMORY_BYTES\",\n" +
-              "                            \"counterValue\" : 1558253568\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"COMMITTED_HEAP_BYTES\",\n" +
-              "                            \"counterName\" : \"COMMITTED_HEAP_BYTES\",\n" +
-              "                            \"counterValue\" : 312475648\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"INPUT_RECORDS_PROCESSED\",\n" +
-              "                            \"counterName\" : \"INPUT_RECORDS_PROCESSED\",\n" +
-              "                            \"counterValue\" : 3\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"OUTPUT_RECORDS\",\n" +
-              "                            \"counterName\" : \"OUTPUT_RECORDS\",\n" +
-              "                            \"counterValue\" : 1\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"OUTPUT_BYTES\",\n" +
-              "                            \"counterName\" : \"OUTPUT_BYTES\",\n" +
-              "                            \"counterValue\" : 3\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"OUTPUT_BYTES_WITH_OVERHEAD\",\n" +
-              "                            \"counterName\" : \"OUTPUT_BYTES_WITH_OVERHEAD\",\n" +
-              "                            \"counterValue\" : 11\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"OUTPUT_BYTES_PHYSICAL\",\n" +
-              "                            \"counterName\" : \"OUTPUT_BYTES_PHYSICAL\",\n" +
-              "                            \"counterValue\" : 25\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"ADDITIONAL_SPILLS_BYTES_WRITTEN\",\n" +
-              "                            \"counterName\" : \"ADDITIONAL_SPILLS_BYTES_WRITTEN\",\n" +
-              "                            \"counterValue\" : 25\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"ADDITIONAL_SPILLS_BYTES_READ\",\n" +
-              "                            \"counterName\" : \"ADDITIONAL_SPILLS_BYTES_READ\",\n" +
-              "                            \"counterValue\" : 25\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"ADDITIONAL_SPILL_COUNT\",\n" +
-              "                            \"counterName\" : \"ADDITIONAL_SPILL_COUNT\",\n" +
-              "                            \"counterValue\" : 0\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"SHUFFLE_BYTES\",\n" +
-              "                            \"counterName\" : \"SHUFFLE_BYTES\",\n" +
-              "                            \"counterValue\" : 25\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"SHUFFLE_BYTES_DECOMPRESSED\",\n" +
-              "                            \"counterName\" : \"SHUFFLE_BYTES_DECOMPRESSED\",\n" +
-              "                            \"counterValue\" : 11\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"SHUFFLE_BYTES_TO_MEM\",\n" +
-              "                            \"counterName\" : \"SHUFFLE_BYTES_TO_MEM\",\n" +
-              "                            \"counterValue\" : 25\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"SHUFFLE_BYTES_TO_DISK\",\n" +
-              "                            \"counterName\" : \"SHUFFLE_BYTES_TO_DISK\",\n" +
-              "                            \"counterValue\" : 0\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"SHUFFLE_BYTES_DISK_DIRECT\",\n" +
-              "                            \"counterName\" : \"SHUFFLE_BYTES_DISK_DIRECT\",\n" +
-              "                            \"counterValue\" : 0\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"NUM_MEM_TO_DISK_MERGES\",\n" +
-              "                            \"counterName\" : \"NUM_MEM_TO_DISK_MERGES\",\n" +
-              "                            \"counterValue\" : 0\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"NUM_DISK_TO_DISK_MERGES\",\n" +
-              "                            \"counterName\" : \"NUM_DISK_TO_DISK_MERGES\",\n" +
-              "                            \"counterValue\" : 0\n" +
-              "                          }\n" +
-              "                        ]\n" +
-              "                    },\n" +
-              "                    { \"counterGroupDisplayName\" : \"HIVE\",\n" +
-              "                      \"counterGroupName\" : \"HIVE\",\n" +
-              "                      \"counters\" : [ { \"counterDisplayName\" : \"CREATED_FILES\",\n" +
-              "                            \"counterName\" : \"CREATED_FILES\",\n" +
-              "                            \"counterValue\" : 1\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"DESERIALIZE_ERRORS\",\n" +
-              "                            \"counterName\" : \"DESERIALIZE_ERRORS\",\n" +
-              "                            \"counterValue\" : 0\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"RECORDS_IN_Map_1\",\n" +
-              "                            \"counterName\" : \"RECORDS_IN_Map_1\",\n" +
-              "                            \"counterValue\" : 3\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"RECORDS_OUT_INTERMEDIATE_Map_1\",\n" +
-              "                            \"counterName\" : \"RECORDS_OUT_INTERMEDIATE_Map_1\",\n" +
-              "                            \"counterValue\" : 1\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"RECORDS_OUT_Reducer_2\",\n" +
-              "                            \"counterName\" : \"RECORDS_OUT_Reducer_2\",\n" +
-              "                            \"counterValue\" : 1\n" +
-              "                          }\n" +
-              "                        ]\n" +
-              "                    },\n" +
-              "                    { \"counterGroupDisplayName\" : \"Shuffle Errors\",\n" +
-              "                      \"counterGroupName\" : \"Shuffle Errors\",\n" +
-              "                      \"counters\" : [ { \"counterDisplayName\" : \"BAD_ID\",\n" +
-              "                            \"counterName\" : \"BAD_ID\",\n" +
-              "                            \"counterValue\" : 0\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"CONNECTION\",\n" +
-              "                            \"counterName\" : \"CONNECTION\",\n" +
-              "                            \"counterValue\" : 0\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"IO_ERROR\",\n" +
-              "                            \"counterName\" : \"IO_ERROR\",\n" +
-              "                            \"counterValue\" : 0\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"WRONG_LENGTH\",\n" +
-              "                            \"counterName\" : \"WRONG_LENGTH\",\n" +
-              "                            \"counterValue\" : 0\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"WRONG_MAP\",\n" +
-              "                            \"counterName\" : \"WRONG_MAP\",\n" +
-              "                            \"counterValue\" : 0\n" +
-              "                          },\n" +
-              "                          { \"counterDisplayName\" : \"WRONG_REDUCE\",\n" +
-              "                            \"counterName\" : \"WRONG_REDUCE\",\n" +
-              "                            \"counterValue\" : 0\n" +
-              "                          }\n" +
-              "                        ]\n" +
-              "                    }\n" +
-              "                  ] },\n" +
-              "            \"dagPlan\" : { \"dagName\" : \"hive_20150209144848_c3a5a07b-c3b6-4f57-a6d5-3dadecdd6fd0:4\",\n" +
-              "                \"edges\" : [ { \"dataMovementType\" : \"SCATTER_GATHER\",\n" +
-              "                      \"dataSourceType\" : \"PERSISTED\",\n" +
-              "                      \"edgeDestinationClass\" : \"org.apache.tez.runtime.library.input.OrderedGroupedKVInput\",\n" +
-              "                      \"edgeId\" : \"533454263\",\n" +
-              "                      \"edgeSourceClass\" : \"org.apache.tez.runtime.library.output.OrderedPartitionedKVOutput\",\n" +
-              "                      \"inputVertexName\" : \"Map 1\",\n" +
-              "                      \"outputVertexName\" : \"Reducer 2\",\n" +
-              "                      \"schedulingType\" : \"SEQUENTIAL\"\n" +
-              "                    } ],\n" +
-              "                \"version\" : 1,\n" +
-              "                \"vertices\" : [ { \"additionalInputs\" : [ { \"class\" : \"org.apache.tez.mapreduce.input.MRInputLegacy\",\n" +
-              "                            \"initializer\" : \"org.apache.hadoop.hive.ql.exec.tez.HiveSplitGenerator\",\n" +
-              "                            \"name\" : \"z\"\n" +
-              "                          } ],\n" +
-              "                      \"outEdgeIds\" : [ \"533454263\" ],\n" +
-              "                      \"processorClass\" : \"org.apache.hadoop.hive.ql.exec.tez.MapTezProcessor\",\n" +
-              "                      \"vertexName\" : \"Map 1\"\n" +
-              "                    },\n" +
-              "                    { \"additionalOutputs\" : [ { \"class\" : \"org.apache.tez.mapreduce.output.MROutput\",\n" +
-              "                            \"name\" : \"out_Reducer 2\"\n" +
-              "                          } ],\n" +
-              "                      \"inEdgeIds\" : [ \"533454263\" ],\n" +
-              "                      \"processorClass\" : \"org.apache.hadoop.hive.ql.exec.tez.ReduceTezProcessor\",\n" +
-              "                      \"vertexName\" : \"Reducer 2\"\n" +
-              "                    }\n" +
-              "                  ]\n" +
-              "              },\n" +
-              "            \"diagnostics\" : \"\",\n" +
-              "            \"endTime\" : 1423493342484,\n" +
-              "            \"initTime\" : 1423493325794,\n" +
-              "            \"numCompletedTasks\" : 2,\n" +
-              "            \"numFailedTaskAttempts\" : 0,\n" +
-              "            \"numFailedTasks\" : 0,\n" +
-              "            \"numKilledTaskAttempts\" : 0,\n" +
-              "            \"numKilledTasks\" : 0,\n" +
-              "            \"numSucceededTasks\" : 2,\n" +
-              "            \"startTime\" : 1423493325803,\n" +
-              "            \"status\" : \"SUCCEEDED\",\n" +
-              "            \"timeTaken\" : 16681,\n" +
-              "            \"vertexNameIdMapping\" : { \"Map 1\" : \"vertex_1423156117563_0005_2_00\",\n" +
-              "                \"Reducer 2\" : \"vertex_1423156117563_0005_2_01\"\n" +
-              "              }\n" +
-              "          },\n" +
-              "        \"primaryfilters\" : { \"applicationId\" : [ \"application_1423156117563_0005\" ],\n" +
-              "            \"dagName\" : [ \"hive_20150209144848_c3a5a07b-c3b6-4f57-a6d5-3dadecdd6fd0:4\" ],\n" +
-              "            \"status\" : [ \"SUCCEEDED\" ],\n" +
-              "            \"user\" : [ \"hive\" ]\n" +
-              "          },\n" +
-              "        \"relatedentities\" : {  },\n" +
-              "        \"starttime\" : 1423493325578\n" +
-              "      } ] }"
-      );
-    }
-
-    @Override
-    public JSONObject tezVerticesListForDAG(String dagId) {
-      return null;
-    }
-
-    @Override
-    public JSONObject tezDagByEntity(String entity) {
-      return tezDagByName(entity);
-    }
-  }
-}
\ No newline at end of file
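
The stubs above feed ATSParser a canned HIVE_QUERY_ID entity. A minimal, self-contained json-simple sketch of the kind of traversal such a parser performs -- reading the entity id and the QUERY_SUBMITTED/QUERY_COMPLETED timestamps, then deriving the duration asserted in testGetHiveJobsList -- could look like the following. The trimmed payload and the class name are illustrative stand-ins, not the real ATS response or view code:

import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;

public class AtsEntitySketch {
  public static void main(String[] args) {
    // trimmed, hypothetical HIVE_QUERY_ID entity; field names follow the stub above
    String payload = "{\"entities\":[{"
        + "\"entity\":\"hive_20150209144848_c3a5a07b-c3b6-4f57-a6d5-3dadecdd6fd0\","
        + "\"starttime\":1423493324355,"
        + "\"events\":["
        + "{\"eventtype\":\"QUERY_COMPLETED\",\"timestamp\":1423493342843},"
        + "{\"eventtype\":\"QUERY_SUBMITTED\",\"timestamp\":1423493324355}]}]}";

    JSONObject root = (JSONObject) JSONValue.parse(payload);
    JSONObject entity = (JSONObject) ((JSONArray) root.get("entities")).get(0);

    long submitted = 0L, completed = 0L;
    for (Object o : (JSONArray) entity.get("events")) {
      JSONObject event = (JSONObject) o;
      long ts = (Long) event.get("timestamp");
      if ("QUERY_SUBMITTED".equals(event.get("eventtype"))) submitted = ts;
      if ("QUERY_COMPLETED".equals(event.get("eventtype"))) completed = ts;
    }
    // whole-second duration, the same arithmetic testGetHiveJobsList asserts
    System.out.println(entity.get("entity") + " ran " + (completed - submitted) / 1000L + "s");
  }
}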

http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/AggregatorTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/AggregatorTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/AggregatorTest.java
deleted file mode 100644
index 91478e7..0000000
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/AggregatorTest.java
+++ /dev/null
@@ -1,506 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.resources.jobs;
-
-import org.apache.ambari.view.hive.persistence.utils.FilteringStrategy;
-import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.hive.resources.IResourceManager;
-import org.apache.ambari.view.hive.resources.jobs.atsJobs.HiveQueryId;
-import org.apache.ambari.view.hive.resources.jobs.atsJobs.IATSParser;
-import org.apache.ambari.view.hive.resources.jobs.atsJobs.TezDagId;
-import org.apache.ambari.view.hive.resources.jobs.atsJobs.TezVertexId;
-import org.apache.ambari.view.hive.resources.jobs.viewJobs.Job;
-import org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl;
-import org.apache.hive.service.cli.thrift.TOperationHandle;
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-
-public class AggregatorTest {
-
-  public static final String SOME_QUERY = "some query";
-
-  @Test
-  public void testReadJobOutsideOfHS2() throws Exception {
-    HiveQueryId hiveQueryId = getSampleHiveQueryId("ENTITY-NAME");
-    ensureOperationIdUnset(hiveQueryId);
-
-    MockATSParser atsParser = getMockATSWithQueries(hiveQueryId);
-
-
-    Aggregator aggregator = new Aggregator(getEmptyJobResourceManager(),
-        getEmptyOperationHandleResourceManager(),
-        atsParser);
-
-    List<Job> aggregated = aggregator.readAll("luke");
-
-    Assert.assertEquals(1, aggregated.size());
-    Job job = aggregated.get(0);
-    Assert.assertEquals("ENTITY-NAME", job.getId());
-    Assert.assertEquals(SOME_QUERY, job.getTitle());
-  }
-
-  @Test
-  public void testReadJobWithHS2OutsideOfView() throws Exception {
-    HiveQueryId hiveQueryId = getSampleHiveQueryId("ENTITY-NAME");
-    ensureOperationIdUnset(hiveQueryId);
-
-    MockATSParser atsParser = getMockATSWithQueries(hiveQueryId);
-    Aggregator aggregator = new Aggregator(getEmptyJobResourceManager(),
-        getEmptyOperationHandleResourceManager(),
-        atsParser);
-
-    List<Job> aggregated = aggregator.readAll("luke");
-
-    Assert.assertEquals(1, aggregated.size());
-    Job job = aggregated.get(0);
-    Assert.assertEquals("ENTITY-NAME", job.getId());
-    Assert.assertEquals(SOME_QUERY, job.getTitle());
-  }
-
-  @Test
-  public void testJobWithoutOperationIdShouldBeIgnored() throws Exception {
-    MockJobResourceManager jobResourceManager = getJobResourceManagerWithJobs(getSampleViewJob("1"));
-
-    Aggregator aggregator = new Aggregator(jobResourceManager,
-        getEmptyOperationHandleResourceManager(),
-        getEmptyATSParser());
-
-    List<Job> aggregated = aggregator.readAll("luke");
-
-    Assert.assertEquals(0, aggregated.size());
-  }
-
-  @Test
-  public void testReadJobOnlyInView() throws Exception {
-    MockJobResourceManager jobResourceManager = getJobResourceManagerWithJobs(getSampleViewJob("1"));
-
-    StoredOperationHandle operationHandle = getSampleOperationHandle("5", "1");
-    MockOperationHandleResourceManager operationHandleResourceManager = getOperationHandleRMWithEntities(Arrays.asList(operationHandle), null);
-
-    Aggregator aggregator = new Aggregator(jobResourceManager,
-        operationHandleResourceManager,
-        getEmptyATSParser());
-
-    List<Job> aggregated = aggregator.readAll("luke");
-
-    Assert.assertEquals(1, aggregated.size());
-    Job job = aggregated.get(0);
-    Assert.assertEquals("1", job.getId());
-  }
-
-  private MockOperationHandleResourceManager getOperationHandleRMWithEntities(List<StoredOperationHandle> operationHandles, List<Job> jobs) {
-    MockOperationHandleResourceManager operationHandleResourceManager = getEmptyOperationHandleResourceManager();
-    HashMap<String, StoredOperationHandle> storage = new HashMap<String, StoredOperationHandle>();
-    for (StoredOperationHandle handle : operationHandles) {
-      storage.put(handle.getJobId(), handle);
-    }
-    if (null != jobs) {
-      Iterator<Job> jobIterator = jobs.iterator();
-      HashMap<String, Job> jobStorage = new HashMap<String, Job>();
-      for (StoredOperationHandle handle : operationHandles) {
-        jobStorage.put(handle.getGuid(), jobIterator.next());
-        operationHandleResourceManager.setJobStorage(jobStorage);
-      }
-    }
-    operationHandleResourceManager.setStorage(storage);
-
-    return operationHandleResourceManager;
-  }
-
-  @Test
-  public void testReadJobBothATSAndView() throws Exception {
-    HiveQueryId hiveQueryId = getSampleHiveQueryId("ENTITY-NAME");
-    hiveQueryId.operationId = Aggregator.hexStringToUrlSafeBase64("1b2b");
-    MockATSParser atsParser = getMockATSWithQueries(hiveQueryId);
-
-    Job job1 = getSampleViewJob("1");
-    MockJobResourceManager jobResourceManager = getJobResourceManagerWithJobs(job1);
-
-    StoredOperationHandle operationHandle = getSampleOperationHandle("5", "1");
-    operationHandle.setGuid("1b2b");
-    MockOperationHandleResourceManager operationHandleResourceManager = getOperationHandleRMWithEntities(Arrays.asList(operationHandle), Arrays.asList(job1));
-
-    Aggregator aggregator = new Aggregator(jobResourceManager,
-        operationHandleResourceManager,
-        atsParser);
-
-    List<Job> aggregated = aggregator.readAll("luke");
-
-    Assert.assertEquals(1, aggregated.size());
-    Job job = aggregated.get(0);
-    Assert.assertEquals("1", job.getId());
-  }
-
-  @Test
-  public void testReadJobBothATSAndViewV2() throws Exception {
-    HiveQueryId hiveQueryId = getSampleHiveQueryIdV2("ENTITY-NAME");
-    hiveQueryId.operationId = Aggregator.hexStringToUrlSafeBase64("1b2b");
-    MockATSParser atsParser = getMockATSWithQueries(hiveQueryId);
-
-    Job job1 = getSampleViewJob("1");
-    MockJobResourceManager jobResourceManager = getJobResourceManagerWithJobs(job1);
-
-    StoredOperationHandle operationHandle = getSampleOperationHandle("5", "1");
-    operationHandle.setGuid("1b2b");
-    MockOperationHandleResourceManager operationHandleResourceManager = getOperationHandleRMWithEntities(Arrays.asList(operationHandle), Arrays.asList(job1));
-
-    Aggregator aggregator = new Aggregator(jobResourceManager,
-      operationHandleResourceManager,
-      atsParser);
-
-    List<Job> aggregated = aggregator.readAll("luke");
-
-    Assert.assertEquals(1, aggregated.size());
-    Job job = aggregated.get(0);
-    Assert.assertEquals("1", job.getId());
-    Assert.assertEquals("app_test_1", job.getApplicationId());
-    Assert.assertEquals("ENTITY-NAME", job.getDagId());
-    Assert.assertEquals("SUCCEEDED", job.getStatus());
-  }
-
-
-  @Test
-  public void testReadJobComplex() throws Exception {
-    //job both on ATS and View
-    HiveQueryId hiveQueryId1 = getSampleHiveQueryId("ENTITY-NAME");
-    hiveQueryId1.operationId = Aggregator.hexStringToUrlSafeBase64("1a1b");
-    Job job1 = getSampleViewJob("1");
-    Job job2 = getSampleViewJob("2");
-    StoredOperationHandle operationHandle1 = getSampleOperationHandle("5", "1");
-    operationHandle1.setGuid("1a1b");
-    StoredOperationHandle operationHandle2 = getSampleOperationHandle("5", "2");
-    operationHandle2.setGuid("2a2b");
-    //job only on ATS
-    HiveQueryId hiveQueryId2 = getSampleHiveQueryId("ENTITY-NAME2");
-    hiveQueryId2.operationId = Aggregator.hexStringToUrlSafeBase64("2a2b");
-
-    //job only in View
-    Job job3 = getSampleViewJob("3");
-    StoredOperationHandle operationHandle3 = getSampleOperationHandle("6", "3");
-    operationHandle3.setGuid("3c3d");
-
-
-    MockATSParser atsParser = getMockATSWithQueries(
-        hiveQueryId1, hiveQueryId2);
-    MockJobResourceManager jobResourceManager = getJobResourceManagerWithJobs(
-        job1, job3);
-    MockOperationHandleResourceManager operationHandleRM = getOperationHandleRMWithEntities(Arrays.asList(
-      operationHandle1, operationHandle2, operationHandle3), Arrays.asList(job1, job2, job3));
-
-    Aggregator aggregator = new Aggregator(jobResourceManager,
-        operationHandleRM,
-        atsParser);
-
-    List<Job> aggregated = aggregator.readAll("luke");
-
-    Assert.assertEquals(3, aggregated.size());
-  }
-
-  private MockJobResourceManager getJobResourceManagerWithJobs(Job... jobs) {
-    MockJobResourceManager jobResourceManager = getEmptyJobResourceManager();
-    jobResourceManager.setJobs(Arrays.asList(jobs));
-    return jobResourceManager;
-  }
-
-  private MockATSParser getEmptyATSParser() {
-    return new MockATSParser();
-  }
-
-  private void ensureOperationIdUnset(HiveQueryId hiveQueryId) {
-    hiveQueryId.operationId = null;
-  }
-
-  public void ensureOperationIdSet(HiveQueryId hiveQueryId) {
-    hiveQueryId.operationId = "operation-id";
-  }
-
-  private MockOperationHandleResourceManager getEmptyOperationHandleResourceManager() {
-    return new MockOperationHandleResourceManager();
-  }
-
-  private MockJobResourceManager getEmptyJobResourceManager() {
-    return new MockJobResourceManager();
-  }
-
-  private MockATSParser getMockATSWithQueries(HiveQueryId... hiveQueryIds) {
-    MockATSParser atsParser = getEmptyATSParser();
-    atsParser.setHiveQueryIds(Arrays.asList(hiveQueryIds));
-    return atsParser;
-  }
-
-  private JobImpl getSampleViewJob(String id) {
-    JobImpl job = new JobImpl();
-    job.setTitle("Test");
-    job.setId(id);
-    job.setOwner("luke");
-    return job;
-  }
-
-  private StoredOperationHandle getSampleOperationHandle(String id, String jobId) {
-    StoredOperationHandle opHandle = new StoredOperationHandle();
-    opHandle.setId(id);
-    opHandle.setJobId(jobId);
-    opHandle.setGuid("1b2b");
-    return opHandle;
-  }
-
-  private HiveQueryId getSampleHiveQueryId(String id) {
-    HiveQueryId hiveQueryId = new HiveQueryId();
-    hiveQueryId.entity = id;
-    hiveQueryId.query = SOME_QUERY;
-    hiveQueryId.user = "luke";
-    hiveQueryId.operationId = "fUjdt-VMRYuKRPCDTUr_rg";
-    hiveQueryId.dagNames = new LinkedList<String>();
-    return hiveQueryId;
-  }
-
-  private HiveQueryId getSampleHiveQueryIdV2(String id) {
-    HiveQueryId hiveQueryId = getSampleHiveQueryId(id);
-    hiveQueryId.version = HiveQueryId.ATS_15_RESPONSE_VERSION;
-    return hiveQueryId;
-  }
-
-  @Test
-  public void testGetJobByOperationId() throws Exception {
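-    // no-op placeholder; operationId matching is exercised by the testReadJobBothATSAndView cases above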
-
-  }
-
-  @Test
-  public void testHexStringToUrlSafeBase64() throws Exception {
-    String urlSafe = Aggregator.hexStringToUrlSafeBase64("1a1b");
-    Assert.assertEquals("Ghs", urlSafe);
-  }
-
-  @Test
-  public void testUrlSafeBase64ToHexString() throws Exception {
-    String hex = Aggregator.urlSafeBase64ToHexString("Ghs");
-    Assert.assertEquals("1a1b", hex);
-  }
-
-  public static class MockJobResourceManager implements IResourceManager<Job> {
-
-    private List<Job> jobs = new LinkedList<Job>();
-
-    @Override
-    public Job create(Job object) {
-      return null;
-    }
-
-    @Override
-    public Job read(Object id) throws ItemNotFound {
-      for(Job job : jobs) {
-        if (job.getId().equals(id)) {
-          return job;
-        }
-      }
-      throw new ItemNotFound();
-    }
-
-    @Override
-    public List<Job> readAll(FilteringStrategy filteringStrategy) {
-      return jobs;
-    }
-
-    @Override
-    public Job update(Job newObject, String id) throws ItemNotFound {
-      return null;
-    }
-
-    @Override
-    public void delete(Object resourceId) throws ItemNotFound {
-
-    }
-
-    public List<Job> getJobs() {
-      return jobs;
-    }
-
-    public void setJobs(List<Job> jobs) {
-      this.jobs = jobs;
-    }
-  }
-
-  public static class MockOperationHandleResourceManager implements IOperationHandleResourceManager {
-    private HashMap<String, StoredOperationHandle> storage = new HashMap<String, StoredOperationHandle>();
-    private HashMap<String, Job> jobStorage = new HashMap<>();
-
-    public MockOperationHandleResourceManager() {
-
-    }
-
-    @Override
-    public List<StoredOperationHandle> readJobRelatedHandles(Job job) {
-      LinkedList<StoredOperationHandle> storedOperationHandles = new LinkedList<StoredOperationHandle>();
-      StoredOperationHandle operationHandle = storage.get(job.getId());
-      if (operationHandle != null)
-        storedOperationHandles.add(operationHandle);
-      return storedOperationHandles;
-    }
-
-    @Override
-    public List<Job> getHandleRelatedJobs(StoredOperationHandle operationHandle) {
-      return new LinkedList<Job>();
-    }
-
-    @Override
-    public Job getJobByHandle(StoredOperationHandle handle) throws ItemNotFound {
-      return jobStorage.get(handle.getGuid());
-    }
-
-    @Override
-    public void putHandleForJob(TOperationHandle h, Job job) {
-
-    }
-
-    @Override
-    public boolean containsHandleForJob(Job job) {
-      return false;
-    }
-
-    @Override
-    public StoredOperationHandle getHandleForJob(Job job) throws ItemNotFound {
-      List<StoredOperationHandle> handles = readJobRelatedHandles(job);
-      if (handles.size() == 0)
-        throw new ItemNotFound();
-      return handles.get(0);
-    }
-
-    @Override
-    public StoredOperationHandle create(StoredOperationHandle object) {
-      return null;
-    }
-
-    @Override
-    public StoredOperationHandle read(Object id) throws ItemNotFound {
-      return null;
-    }
-
-    @Override
-    public List<StoredOperationHandle> readAll(FilteringStrategy filteringStrategy) {
-      LinkedList<StoredOperationHandle> storedOperationHandles = new LinkedList<StoredOperationHandle>();
-      for (StoredOperationHandle handle : storage.values()) {
-        if (filteringStrategy.isConform(handle))
-          storedOperationHandles.add(handle);
-      }
-      return storedOperationHandles;
-    }
-
-    @Override
-    public StoredOperationHandle update(StoredOperationHandle newObject, String id) throws ItemNotFound {
-      return null;
-    }
-
-    @Override
-    public void delete(Object resourceId) throws ItemNotFound {
-
-    }
-
-    public HashMap<String, StoredOperationHandle> getStorage() {
-      return storage;
-    }
-
-    public void setStorage(HashMap<String, StoredOperationHandle> storage) {
-      this.storage = storage;
-    }
-
-    public HashMap<String, Job> getJobStorage() {
-      return jobStorage;
-    }
-
-    public void setJobStorage(HashMap<String, Job> jobStorage) {
-      this.jobStorage = jobStorage;
-    }
-  }
-
-  public static class MockATSParser implements IATSParser {
-
-    private List<HiveQueryId> hiveQueryIds = new LinkedList<HiveQueryId>();
-
-    public MockATSParser() {
-    }
-
-    @Override
-    public List<HiveQueryId> getHiveQueryIdsForUser(String username) {
-      return hiveQueryIds;
-    }
-
-    @Override
-    public List<TezVertexId> getVerticesForDAGId(String dagId) {
-      List<TezVertexId> vertices = new LinkedList<TezVertexId>();
-      TezVertexId tezVertexId1 = new TezVertexId();
-      tezVertexId1.entity = "vertex_1234567_99_99_01";
-      tezVertexId1.vertexName = "Map 1";
-      vertices.add(tezVertexId1);
-
-      TezVertexId tezVertexId2 = new TezVertexId();
-      tezVertexId2.entity = "vertex_1234567_99_99_00";
-      tezVertexId2.vertexName = "Reduce 1";
-      vertices.add(tezVertexId2);
-      return vertices;
-    }
-
-    @Override
-    public HiveQueryId getHiveQueryIdByOperationId(String guid) {
-      return new HiveQueryId();
-    }
-
-    @Override
-    public TezDagId getTezDAGByName(String name) {
-      return new TezDagId();
-    }
-
-    @Override
-    public TezDagId getTezDAGByEntity(String entity) {
-      TezDagId dagId = new TezDagId();
-      dagId.applicationId = "app_test_1";
-      dagId.entity = entity;
-      dagId.status = "SUCCEEDED";
-      return dagId;
-    }
-
-    @Override
-    public List<HiveQueryId> getHiveQueryIdsForUserByTime(String username, long startTime, long endTime) {
-      return null;
-    }
-
-    @Override
-    public HiveQueryId getHiveQueryIdByHiveEntityId(String hiveEntityId) {
-      return null;
-    }
-
-    @Override
-    public List<HiveQueryId> getHiveQueryIdByEntityList(List<String> hiveEntityIds) {
-      return null;
-    }
-
-    public List<HiveQueryId> getHiveQueryIds() {
-      return hiveQueryIds;
-    }
-
-    public void setHiveQueryIds(List<HiveQueryId> hiveQueryIds) {
-      this.hiveQueryIds = hiveQueryIds;
-    }
-  }
-}
\ No newline at end of file
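
The two Base64 tests above pin down the round trip that lets the Aggregator match an ATS operationId (unpadded, URL-safe Base64) against a HiveServer2 handle GUID (hex). A self-contained sketch of that conversion using commons-codec, which the deleted tests already depend on; the class name here is illustrative:

import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.binary.Hex;

public class OperationIdCodecSketch {
  public static void main(String[] args) throws DecoderException {
    // hex "1a1b" -> bytes {0x1a, 0x1b} -> unpadded URL-safe Base64 "Ghs"
    byte[] raw = Hex.decodeHex("1a1b".toCharArray());
    System.out.println(Base64.encodeBase64URLSafeString(raw)); // Ghs

    // and back: "Ghs" -> bytes {0x1a, 0x1b} -> hex "1a1b"
    System.out.println(Hex.encodeHexString(Base64.decodeBase64("Ghs"))); // 1a1b
  }
}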

http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobLDAPServiceTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobLDAPServiceTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobLDAPServiceTest.java
deleted file mode 100644
index dfdcb34..0000000
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobLDAPServiceTest.java
+++ /dev/null
@@ -1,159 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.resources.jobs;
-
-import org.apache.ambari.view.hive.BaseHiveTest;
-import org.apache.ambari.view.hive.ServiceTestUtils;
-import org.apache.ambari.view.hive.client.Connection;
-import org.apache.ambari.view.hive.client.HiveAuthRequiredException;
-import org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl;
-import org.apache.ambari.view.hive.utils.HdfsApiMock;
-import org.apache.ambari.view.utils.UserLocal;
-import org.apache.ambari.view.utils.hdfs.HdfsApi;
-import org.apache.ambari.view.utils.hdfs.HdfsApiException;
-import org.hamcrest.BaseMatcher;
-import org.hamcrest.Description;
-import org.json.simple.JSONObject;
-import org.junit.*;
-import org.junit.rules.ExpectedException;
-
-import javax.ws.rs.WebApplicationException;
-import java.io.File;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-import static org.easymock.EasyMock.*;
-
-public class JobLDAPServiceTest extends BaseHiveTest {
-  private JobService jobService;
-  @Rule public ExpectedException thrown = ExpectedException.none();
-
-  @BeforeClass
-  public static void startUp() throws Exception {
-    BaseHiveTest.startUp(); // super
-  }
-
-  @AfterClass
-  public static void shutDown() throws Exception {
-    BaseHiveTest.shutDown(); // super
-  }
-
-  @Override
-  @After
-  public void tearDown() throws Exception {
-    jobService.getSharedObjectsFactory().clear(HdfsApi.class);
-  }
-
-  @Override
-  protected void setupProperties(Map<String, String> properties, File baseDir) throws Exception {
-    super.setupProperties(properties, baseDir);
-    properties.put("hive.transport.mode", "binary");
-    properties.put("hive.host", "127.0.0.1");
-    properties.put("hive.port", "42420");
-
-    properties.put("scripts.dir", "/tmp/.hiveQueries");
-    properties.put("jobs.dir", "/tmp/.hiveJobs");
-  }
-
-  private HdfsApiMock setupHdfsApiMock() throws IOException, InterruptedException, HdfsApiException {
-    HdfsApiMock hdfsApiMock = new HdfsApiMock("select * from Z");
-    HdfsApi hdfsApi = hdfsApiMock.getHdfsApi();
-    jobService.getSharedObjectsFactory().setInstance(HdfsApi.class, hdfsApi);
-    replay(hdfsApi);
-    return hdfsApiMock;
-  }
-
-  @Test
-  public void createJobNoPasswordProvided() throws Exception {
-    UserLocal.dropAllConnections(Connection.class);
-    Map<String, String> properties = new HashMap<String, String>();
-    properties.put("hive.auth", "auth=NONE;password=${ask_password}");
-    context = makeContext(properties, "ambari-qa-1", "MyHive");
-    replay(context);
-    jobService = getService(JobService.class, handler, context);
-    setupHdfsApiMock();
-
-    JobService.JobRequest request = new JobService.JobRequest();
-    request.job = new JobImpl();
-    request.job.setForcedContent("Hello world");
-
-    thrown.expect(HiveAuthRequiredException.class);
-    jobService.create(request,
-        ServiceTestUtils.getResponseWithLocation(), ServiceTestUtils.getDefaultUriInfo());
-  }
-
-  @Test
-  public void createJobNoPasswordRequired() throws Exception {
-    Map<String, String> properties = new HashMap<String, String>();
-    properties.put("hive.auth", "auth=NONE");
-    context = makeContext(properties, "ambari-qa-2", "MyHive");
-    replay(context);
-    jobService = getService(JobService.class, handler, context);
-    setupHdfsApiMock();
-
-    JobService.JobRequest request = new JobService.JobRequest();
-    request.job = new JobImpl();
-    request.job.setForcedContent("Hello world");
-
-    thrown.expect(new ExpectedJSONErrorMessage("Connection refused"));
-    jobService.create(request,
-        ServiceTestUtils.getResponseWithLocation(), ServiceTestUtils.getDefaultUriInfo());
-  }
-
-  @Test
-  public void createJobPasswordProvided() throws Exception {
-    Map<String, String> properties = new HashMap<String, String>();
-    properties.put("hive.auth", "auth=NONE;password=${ask_password}");
-    context = makeContext(properties, "ambari-qa-3", "MyHive");
-    replay(context);
-    jobService = getService(JobService.class, handler, context);
-    setupHdfsApiMock();
-
-    JobService.JobRequest request = new JobService.JobRequest();
-    request.job = new JobImpl();
-    request.job.setForcedContent("Hello world");
-
-    JobService.AuthRequest authRequest = new JobService.AuthRequest();
-    authRequest.password = "ok";
-
-    thrown.expect(new ExpectedJSONErrorMessage("Connection refused"));
-    jobService.setupPassword(authRequest);
-  }
-
-  private static class ExpectedJSONErrorMessage extends BaseMatcher<WebApplicationException> {
-    private String expectedMessage;
-
-    public ExpectedJSONErrorMessage(String message) {
-      this.expectedMessage = message;
-    }
-
-    @Override
-    public void describeTo(Description description) {
-      description.appendText(this.expectedMessage);
-    }
-
-    @Override
-    public boolean matches(Object o) {
-      JSONObject response = (JSONObject) ((WebApplicationException) o).getResponse().getEntity();
-      String message = (String) response.get("message");
-      return message.contains(expectedMessage);
-    }
-  }
-}

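The file above pairs JUnit 4's ExpectedException rule with a custom Hamcrest BaseMatcher (ExpectedJSONErrorMessage) so the test can assert on the JSON error body carried inside a WebApplicationException, not merely on the exception type. A reduced sketch of that matcher idiom, with the exception and payload types replaced by hypothetical stand-ins:

    import org.hamcrest.BaseMatcher;
    import org.hamcrest.Description;

    // Hypothetical exception carrying a textual payload, standing in for
    // WebApplicationException plus its JSON entity in the test above.
    class PayloadException extends RuntimeException {
      final String payload;
      PayloadException(String payload) { this.payload = payload; }
    }

    // Matches when the thrown exception's payload contains a fragment.
    class PayloadContains extends BaseMatcher<PayloadException> {
      private final String fragment;
      PayloadContains(String fragment) { this.fragment = fragment; }

      @Override
      public boolean matches(Object o) {
        return o instanceof PayloadException
            && ((PayloadException) o).payload.contains(fragment);
      }

      @Override
      public void describeTo(Description description) {
        description.appendText("payload containing ").appendValue(fragment);
      }
    }

With the rule in place this reads thrown.expect(new PayloadContains("Connection refused")); immediately before the call that should fail, which is exactly the shape createJobNoPasswordRequired uses above.
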
http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobServiceTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobServiceTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobServiceTest.java
deleted file mode 100644
index 55c62e7..0000000
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobServiceTest.java
+++ /dev/null
@@ -1,225 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.resources.jobs;
-
-import org.apache.ambari.view.hive.ServiceTestUtils;
-import org.apache.ambari.view.hive.BaseHiveTest;
-import org.apache.ambari.view.hive.client.UserLocalConnection;
-import org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl;
-import org.apache.ambari.view.hive.utils.HdfsApiMock;
-import org.apache.ambari.view.hive.client.Connection;
-import org.apache.ambari.view.hive.client.HiveClientException;
-import org.apache.ambari.view.hive.resources.savedQueries.SavedQuery;
-import org.apache.ambari.view.hive.resources.savedQueries.SavedQueryService;
-import org.apache.ambari.view.hive.utils.BadRequestFormattedException;
-import org.apache.ambari.view.utils.hdfs.HdfsApi;
-import org.apache.ambari.view.utils.hdfs.HdfsApiException;
-import org.apache.hive.service.cli.thrift.*;
-import org.json.simple.JSONObject;
-import org.junit.*;
-import org.junit.rules.ExpectedException;
-
-import javax.ws.rs.core.Response;
-import java.io.File;
-import java.io.IOException;
-import java.util.Map;
-
-import static org.easymock.EasyMock.*;
-
-public class JobServiceTest extends BaseHiveTest {
-  private SavedQueryService savedQueryService;
-  private JobService jobService;
-  @Rule public ExpectedException thrown = ExpectedException.none();
-
-  @BeforeClass
-  public static void startUp() throws Exception {
-    BaseHiveTest.startUp(); // super
-  }
-
-  @AfterClass
-  public static void shutDown() throws Exception {
-    BaseHiveTest.shutDown(); // super
-  }
-
-  @Override
-  @After
-  public void tearDown() throws Exception {
-    jobService.getSharedObjectsFactory().clear(HdfsApi.class);
-  }
-
-  @Override
-  @Before
-  public void setUp() throws Exception {
-    super.setUp();
-    savedQueryService = getService(SavedQueryService.class, handler, context);
-    jobService = getService(JobService.class, handler, context);
-
-    Connection hiveConnection = configureHiveConnectionMock();
-
-    new UserLocalConnection().set(hiveConnection, context);
-    jobService.setAggregator(
-        new Aggregator(
-            jobService.getResourceManager(),
-            jobService.getOperationHandleResourceManager(),
-            new AggregatorTest.MockATSParser())
-    );
-  }
-
-  @Test
-  public void createJobFromQuery() throws IOException, InterruptedException, HdfsApiException {
-    setupHdfsApiMock();
-
-    SavedQuery savedQueryForJob = createSavedQuery("Test", null);
-    JobService.JobRequest jobCreationRequest = new JobService.JobRequest();
-    jobCreationRequest.job = new JobImpl();
-    jobCreationRequest.job.setQueryId(savedQueryForJob.getId());
-
-    Response response = jobService.create(jobCreationRequest,
-        ServiceTestUtils.getResponseWithLocation(), ServiceTestUtils.getDefaultUriInfo());
-    ServiceTestUtils.assertHTTPResponseCreated(response);
-    JSONObject jobObj = (JSONObject)response.getEntity();
-
-
-    assertResponseJobSanity(jobObj);
-    Assert.assertEquals(getFieldFromJobJSON(jobObj, "queryId"), savedQueryForJob.getId());
-  }
-
-  @Test
-  public void createJobForcedContent() throws IOException, InterruptedException, HdfsApiException {
-    HdfsApiMock hdfsApiMock = setupHdfsApiMock();
-
-    JobService.JobRequest request = new JobService.JobRequest();
-    request.job = new JobImpl();
-    request.job.setForcedContent("Hello world");
-
-
-    Response response = jobService.create(request,
-        ServiceTestUtils.getResponseWithLocation(), ServiceTestUtils.getDefaultUriInfo());
-    ServiceTestUtils.assertHTTPResponseCreated(response);
-    JSONObject jobObj = (JSONObject)response.getEntity();
-
-
-    assertResponseJobSanity(jobObj);
-    Assert.assertNull(getFieldFromJobJSON(jobObj, "queryId"));
-    Assert.assertEquals("", getFieldFromJobJSON(jobObj, "forcedContent"));
-    Assert.assertEquals("Hello world", hdfsApiMock.getQueryOutputStream().toString());
-  }
-
-  @Test
-  public void createJobNoSource() throws IOException, InterruptedException {
-    HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
-    expect(hdfsApi.mkdir(anyString())).andReturn(true).anyTimes();
-    jobService.getSharedObjectsFactory().setInstance(HdfsApi.class, hdfsApi);
-    replay(hdfsApi);
-
-    JobService.JobRequest request = new JobService.JobRequest();
-    request.job = new JobImpl();
-    request.job.setForcedContent(null);
-    request.job.setQueryId(null);
-
-    thrown.expect(BadRequestFormattedException.class);
-    jobService.create(request,
-        ServiceTestUtils.getResponseWithLocation(), ServiceTestUtils.getDefaultUriInfo());
-  }
-
-  private Connection configureHiveConnectionMock() throws HiveClientException {
-    TGetOperationStatusResp statusResp = getOperationStatusResp();
-    TOperationHandle operationHandle = getExecutionOperationHandle();
-
-    Connection connection = createNiceMock(Connection.class);
-    TSessionHandle sessionHandle = new TSessionHandle();
-    THandleIdentifier handleIdentifier = new THandleIdentifier();
-    handleIdentifier.setGuid(new byte[]{1,2,3,4,5,6,7,8});
-    sessionHandle.setSessionId(handleIdentifier);
-    expect(connection.openSession()).andReturn(sessionHandle).anyTimes();
-    expect(connection.executeAsync((TSessionHandle) anyObject(), anyString())).andReturn(operationHandle).anyTimes();
-    expect(connection.getLogs(anyObject(TOperationHandle.class))).andReturn("some logs").anyTimes();
-    expect(connection.getOperationStatus(anyObject(TOperationHandle.class))).andReturn(statusResp).anyTimes();
-
-    replay(connection);
-    return connection;
-  }
-
-  private TGetOperationStatusResp getOperationStatusResp() {
-    TStatus status = new TStatus();
-    status.setStatusCode(TStatusCode.SUCCESS_STATUS);
-
-    TGetOperationStatusResp statusResp = new TGetOperationStatusResp();
-    statusResp.setStatus(status);
-
-    return statusResp;
-  }
-
-  private TOperationHandle getExecutionOperationHandle() {
-    THandleIdentifier handleIdentifier = new THandleIdentifier();
-    handleIdentifier.setGuid("some guid".getBytes());
-    handleIdentifier.setSecret("some secret".getBytes());
-
-    TOperationHandle operationHandle = new TOperationHandle();
-    operationHandle.setHasResultSet(true);
-    operationHandle.setModifiedRowCount(0);
-    operationHandle.setOperationType(TOperationType.EXECUTE_STATEMENT);
-    operationHandle.setOperationId(handleIdentifier);
-    return operationHandle;
-  }
-
-  @Override
-  protected void setupProperties(Map<String, String> properties, File baseDir) throws Exception {
-    super.setupProperties(properties, baseDir);
-    properties.put("scripts.dir", "/tmp/.hiveQueries");
-    properties.put("jobs.dir", "/tmp/.hiveJobs");
-  }
-
-  public static Response doCreateSavedQuery(String title, String path, SavedQueryService service) {
-    SavedQueryService.SavedQueryRequest request = new SavedQueryService.SavedQueryRequest();
-    request.savedQuery = new SavedQuery();
-    request.savedQuery.setTitle(title);
-    request.savedQuery.setQueryFile(path);
-
-    return service.create(request,
-        ServiceTestUtils.getResponseWithLocation(), ServiceTestUtils.getDefaultUriInfo());
-  }
-
-  private SavedQuery createSavedQuery(String title, String path) {
-    Response response = doCreateSavedQuery(title, path, savedQueryService);
-    JSONObject obj = (JSONObject)response.getEntity();
-    return (SavedQuery) obj.get("savedQuery");
-  }
-
-
-  private Object getFieldFromJobJSON(JSONObject jobObj, String field) {
-    return ((Map) jobObj.get("job")).get(field);
-  }
-
-  private HdfsApiMock setupHdfsApiMock() throws IOException, InterruptedException, HdfsApiException {
-    HdfsApiMock hdfsApiMock = new HdfsApiMock("select * from Z");
-    HdfsApi hdfsApi = hdfsApiMock.getHdfsApi();
-    jobService.getSharedObjectsFactory().setInstance(HdfsApi.class, hdfsApi);
-    replay(hdfsApi);
-    return hdfsApiMock;
-  }
-
-  private void assertResponseJobSanity(JSONObject jobObj) {
-    Assert.assertTrue(jobObj.containsKey("job"));
-    Assert.assertNotNull(((Map) jobObj.get("job")).get("id"));
-    Assert.assertNotNull(((Map) jobObj.get("job")).get("queryFile"));
-  }
-
-}

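configureHiveConnectionMock above is a textbook instance of the EasyMock expect/replay recipe: record permissive expectations with anyTimes(), switch the mock to replay mode, then hand it to the code under test. The same recipe in isolation, against a hypothetical two-method interface rather than the view's Thrift-backed Connection:

    import static org.easymock.EasyMock.anyString;
    import static org.easymock.EasyMock.createNiceMock;
    import static org.easymock.EasyMock.expect;
    import static org.easymock.EasyMock.replay;

    public class EasyMockRecipeSketch {
      // Hypothetical stand-in for the view's Connection interface.
      interface Backend {
        String open();
        String logsFor(String sessionId);
      }

      public static void main(String[] args) {
        Backend backend = createNiceMock(Backend.class);
        // Record phase: anyTimes() keeps the mock permissive, like the
        // openSession()/getLogs()/getOperationStatus() stubs above.
        expect(backend.open()).andReturn("session-1").anyTimes();
        expect(backend.logsFor(anyString())).andReturn("some logs").anyTimes();
        replay(backend); // switch from record mode to replay mode

        String session = backend.open();
        assert "some logs".equals(backend.logsFor(session));
      }
    }
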
http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/LogParserTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/LogParserTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/LogParserTest.java
deleted file mode 100644
index 1e04dd7..0000000
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/LogParserTest.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.resources.jobs;
-
-import org.junit.Assert;
-import org.junit.Test;
-
-public class LogParserTest {
-    @Test
-    public void testParseMRLog() {
-        String log = "INFO : Number of reduce tasks determined at compile time: 1\n" +
-            "INFO : In order to change the average load for a reducer (in bytes):\n" +
-            "INFO : set hive.exec.reducers.bytes.per.reducer=<number>\n" +
-            "INFO : In order to limit the maximum number of reducers:\n" +
-            "INFO : set hive.exec.reducers.max=<number>\n" +
-            "INFO : In order to set a constant number of reducers:\n" +
-            "INFO : set mapreduce.job.reduces=<number>\n" +
-            "WARN : Hadoop command-line option parsing not performed. Implement the Tool interface and execute your application with ToolRunner to remedy this.\n" +
-            "INFO : number of splits:1\n" +
-            "INFO : Submitting tokens for job: job_1421248330903_0003\n" +
-            "INFO : The url to track the job: http://dataworker.hortonworks.com:8088/proxy/application_1421248330903_0003/\n" +
-            "INFO : Starting Job = job_1421248330903_0003, Tracking URL = http://dataworker.hortonworks.com:8088/proxy/application_1421248330903_0003/\n" +
-            "INFO : Kill Command = /usr/hdp/current/hadoop-client/bin/hadoop job -kill job_1421248330903_0003\n" +
-            "INFO : Hadoop job information for Stage-1: number of mappers: 1; number of reducers: 1\n" +
-            "INFO : 2015-01-21 15:03:55,979 Stage-1 map = 0%, reduce = 0%\n" +
-            "INFO : 2015-01-21 15:04:07,503 Stage-1 map = 100%, reduce = 0%, Cumulative CPU 0.79 sec\n" +
-            "INFO : 2015-01-21 15:04:17,384 Stage-1 map = 100%, reduce = 100%, Cumulative CPU 1.86 sec\n" +
-            "INFO : MapReduce Total cumulative CPU time: 1 seconds 860 msec\n" +
-            "INFO : Ended Job = job_1421248330903_0003";
-
-        LogParser p = LogParser.parseLog(log);
-        Assert.assertEquals(1, p.getAppsList().size());
-        Assert.assertEquals("application_1421248330903_0003",(((LogParser.AppId) (p.getAppsList().toArray())[0])
-                                                            .getIdentifier()));
-    }
-
-    @Test
-    public void testParseTezLog() {
-        String log = "INFO : Tez session hasn't been created yet. Opening session\n" +
-            "INFO :\n" +
-            "\n" +
-            "INFO : Status: Running (Executing on YARN cluster with App id application_1423156117563_0003)\n" +
-            "\n" +
-            "INFO : Map 1: -/- Reducer 2: 0/1\n" +
-            "INFO : Map 1: 0/1 Reducer 2: 0/1\n" +
-            "INFO : Map 1: 0/1 Reducer 2: 0/1\n" +
-            "INFO : Map 1: 0(+1)/1 Reducer 2: 0/1\n" +
-            "INFO : Map 1: 0(+1)/1 Reducer 2: 0/1\n" +
-            "INFO : Map 1: 1/1 Reducer 2: 0(+1)/1\n" +
-            "INFO : Map 1: 1/1 Reducer 2: 1/1 ";
-
-        LogParser p = LogParser.parseLog(log);
-        Assert.assertEquals(1, p.getAppsList().size());
-        Assert.assertEquals("application_1423156117563_0003",(((LogParser.AppId) (p.getAppsList().toArray())[0])
-            .getIdentifier()));
-    }
-}

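Both cases above reduce to one operation: pulling YARN application ids out of free-form Hive log text, whether they appear as job_<cluster>_<seq> (MapReduce) or application_<cluster>_<seq> (Tez). A regex-based sketch of that extraction — an assumption about the approach, since the actual LogParser source is not part of this diff:

    import java.util.LinkedHashSet;
    import java.util.Set;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class AppIdExtractionSketch {
      // MR logs print "job_<cluster>_<seq>", Tez logs print
      // "application_<cluster>_<seq>"; both share the numeric suffix.
      private static final Pattern ID =
          Pattern.compile("(?:job|application)_(\\d+)_(\\d+)");

      static Set<String> applicationIds(String log) {
        Set<String> ids = new LinkedHashSet<String>();
        Matcher m = ID.matcher(log);
        while (m.find()) {
          // Normalize job_* to application_*, matching the assertions above.
          ids.add("application_" + m.group(1) + "_" + m.group(2));
        }
        return ids;
      }

      public static void main(String[] args) {
        Set<String> ids = applicationIds(
            "INFO : Starting Job = job_1421248330903_0003, Tracking URL = ...");
        assert ids.contains("application_1421248330903_0003");
      }
    }
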
http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/resources/FileResourceServiceTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/resources/FileResourceServiceTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/resources/FileResourceServiceTest.java
deleted file mode 100644
index 026acc3..0000000
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/resources/FileResourceServiceTest.java
+++ /dev/null
@@ -1,119 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.resources.resources;
-
-import org.apache.ambari.view.hive.BaseHiveTest;
-import org.apache.ambari.view.hive.resources.resources.FileResourceItem;
-import org.apache.ambari.view.hive.resources.resources.FileResourceService;
-import org.apache.ambari.view.hive.utils.NotFoundFormattedException;
-import org.json.simple.JSONObject;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
-
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.UriBuilder;
-import javax.ws.rs.core.UriInfo;
-import java.net.URI;
-
-import static org.easymock.EasyMock.*;
-
-public class FileResourceServiceTest extends BaseHiveTest {
-  @Rule public ExpectedException thrown = ExpectedException.none();
-  private FileResourceService resourceService;
-
-  @Override
-  @Before
-  public void setUp() throws Exception {
-    super.setUp();
-    resourceService = getService(FileResourceService.class, handler, context);
-  }
-
-  private Response doCreateFileResourceItem() {
-    FileResourceService.ResourceRequest request = new FileResourceService.ResourceRequest();
-    request.fileResource = new FileResourceItem();
-    request.fileResource.setPath("/tmp/file.jar");
-    request.fileResource.setName("TestFileResourceItem");
-
-    UriInfo uriInfo = createNiceMock(UriInfo.class);
-    URI uri = UriBuilder.fromUri("http://host/a/b").build();
-    expect(uriInfo.getAbsolutePath()).andReturn(uri);
-
-    HttpServletResponse resp_obj = createNiceMock(HttpServletResponse.class);
-
-    resp_obj.setHeader(eq("Location"), anyString());
-
-    replay(uriInfo, resp_obj);
-    return resourceService.create(request, resp_obj, uriInfo);
-  }
-
-  @Test
-  public void createFileResourceItem() {
-    Response response = doCreateFileResourceItem();
-    Assert.assertEquals(201, response.getStatus());
-
-    JSONObject obj = (JSONObject)response.getEntity();
-    Assert.assertTrue(obj.containsKey("fileResource"));
-    Assert.assertNotNull(((FileResourceItem) obj.get("fileResource")).getId());
-  }
-
-  @Test
-  public void resourceNotFound() {
-    thrown.expect(NotFoundFormattedException.class);
-    resourceService.getOne("4242");
-  }
-
-  @Test
-  public void updateFileResourceItem() {
-    Response createdFileResourceItem = doCreateFileResourceItem();
-    Object createdUdfId = ((FileResourceItem) ((JSONObject) createdFileResourceItem.getEntity()).get("fileResource")).getId();
-
-    FileResourceService.ResourceRequest request = new FileResourceService.ResourceRequest();
-    request.fileResource = new FileResourceItem();
-    request.fileResource.setPath("/tmp/updatedFileResourceItem.jar");
-    request.fileResource.setName("TestFileResourceItem2");
-
-    Response response = resourceService.update(request, String.valueOf(createdUdfId));
-    Assert.assertEquals(204, response.getStatus());
-
-    Response response2 = resourceService.getOne(String.valueOf(createdUdfId));
-    Assert.assertEquals(200, response2.getStatus());
-
-    JSONObject obj = ((JSONObject) response2.getEntity());
-    Assert.assertTrue(obj.containsKey("fileResource"));
-    Assert.assertEquals(((FileResourceItem) obj.get("fileResource")).getName(), request.fileResource.getName());
-    Assert.assertEquals(((FileResourceItem) obj.get("fileResource")).getPath(), request.fileResource.getPath());
-  }
-
-  @Test
-  public void deleteFileResourceItem() {
-    Response createdFileResourceItem = doCreateFileResourceItem();
-    Object createdUdfId = ((FileResourceItem) ((JSONObject) createdFileResourceItem.getEntity()).get("fileResource")).getId();
-
-    Response response = resourceService.delete(String.valueOf(createdUdfId));
-    Assert.assertEquals(204, response.getStatus());
-
-    thrown.expect(NotFoundFormattedException.class);
-    resourceService.getOne(String.valueOf(createdUdfId));
-  }
-}

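doCreateFileResourceItem above shows the distinctive setup for testing a JAX-RS create() handler: the handler reads the request URI to build its Location header, so the test fakes both UriInfo and the servlet response. The same setup in isolation, with no service behind it — note the void-method expectation, which EasyMock records simply by invoking the method in record mode:

    import static org.easymock.EasyMock.anyString;
    import static org.easymock.EasyMock.createNiceMock;
    import static org.easymock.EasyMock.eq;
    import static org.easymock.EasyMock.expect;
    import static org.easymock.EasyMock.replay;

    import java.net.URI;
    import javax.servlet.http.HttpServletResponse;
    import javax.ws.rs.core.UriBuilder;
    import javax.ws.rs.core.UriInfo;

    public class JaxRsCollaboratorMockSketch {
      public static void main(String[] args) {
        UriInfo uriInfo = createNiceMock(UriInfo.class);
        URI uri = UriBuilder.fromUri("http://host/a/b").build();
        expect(uriInfo.getAbsolutePath()).andReturn(uri);

        HttpServletResponse resp = createNiceMock(HttpServletResponse.class);
        // Void method: calling it during record mode is the expectation.
        resp.setHeader(eq("Location"), anyString());

        replay(uriInfo, resp);

        // A real service call would go here; the mocks answer as recorded.
        assert "http://host/a/b".equals(uriInfo.getAbsolutePath().toString());
        resp.setHeader("Location", uri.toString() + "/42");
      }
    }
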
http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceManagerTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceManagerTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceManagerTest.java
deleted file mode 100644
index b9fac9f..0000000
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceManagerTest.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.hive.resources.savedQueries;
-
-import org.junit.Assert;
-import org.junit.Test;
-
-public class SavedQueryResourceManagerTest {
-
-  @Test
-  public void testMakeShortQuery() throws Exception {
-    String query = "select * from table;";
-    String shortQuery = SavedQueryResourceManager.makeShortQuery(query);
-    Assert.assertEquals(query, shortQuery);
-  }
-
-  @Test
-  public void testMakeShortQuery42Trim() throws Exception {
-    String str50 = "12345678901234567890123456789012345678901234567890";
-    String str42 = "123456789012345678901234567890123456789012";
-    String shortQuery = SavedQueryResourceManager.makeShortQuery(str50);
-    Assert.assertEquals(str42, shortQuery);
-  }
-
-  @Test
-  public void testMakeShortQueryRemoveSet() throws Exception {
-    String str50 = "set hive.execution.engine=tez;\nselect * from table;";
-    String shortQuery = SavedQueryResourceManager.makeShortQuery(str50);
-    Assert.assertEquals("select * from table;", shortQuery);
-
-    str50 = "set hive.execution.engine = tez;  \n select * from table;";
-    shortQuery = SavedQueryResourceManager.makeShortQuery(str50);
-    Assert.assertEquals("select * from table;", shortQuery);
-
-    str50 = "SET  property=value;\nselect * from table;";
-    shortQuery = SavedQueryResourceManager.makeShortQuery(str50);
-    Assert.assertEquals("select * from table;", shortQuery);
-  }
-}
\ No newline at end of file
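
The three cases above pin down the contract of SavedQueryResourceManager.makeShortQuery: leading "set <property> = <value>;" statements are stripped, and the remainder is truncated to 42 characters. A plausible reconstruction that satisfies exactly these assertions — the view's actual implementation is not part of this diff, so treat the regex as an assumption:

    public class ShortQuerySketch {
      // Reconstruction of the asserted contract: strip leading
      // "set <property> = <value>;" statements, then cap at 42 characters.
      static String makeShortQuery(String query) {
        String cleaned = query
            .replaceAll("(?i)set\\s+[\\w.]+\\s*=\\s*\\w+;\\s*", "")
            .trim();
        return cleaned.length() > 42 ? cleaned.substring(0, 42) : cleaned;
      }

      public static void main(String[] args) {
        assert "select * from table;".equals(makeShortQuery(
            "set hive.execution.engine=tez;\nselect * from table;"));
        assert makeShortQuery(
            "12345678901234567890123456789012345678901234567890").length() == 42;
      }
    }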