Posted to commits@hive.apache.org by we...@apache.org on 2017/05/25 20:50:09 UTC

[30/31] hive git commit: HIVE-15834: Add unit tests for org.json usage on master (Daniel Voros via Zoltan Haindrich)

HIVE-15834: Add unit tests for org.json usage on master (Daniel Voros via Zoltan Haindrich)

Signed-off-by: Zoltan Haindrich <ki...@rxd.hu>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/2fa4dc27
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/2fa4dc27
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/2fa4dc27

Branch: refs/heads/hive-14535
Commit: 2fa4dc277e0cd28261602c392c2f55d040d22677
Parents: 811e599
Author: Daniel Voros <da...@gmail.com>
Authored: Thu May 25 21:16:44 2017 +0200
Committer: Zoltan Haindrich <ki...@rxd.hu>
Committed: Thu May 25 21:16:44 2017 +0200

----------------------------------------------------------------------
 .../hadoop/hive/common/jsonexplain/Op.java      |   4 +-
 .../hadoop/hive/common/jsonexplain/TestOp.java  |  81 +++++
 .../hive/common/jsonexplain/TestStage.java      | 194 ++++++++++++
 .../hive/common/jsonexplain/TestVertex.java     | 108 +++++++
 .../jsonexplain/tez/TestTezJsonParser.java      |  53 ++++
 .../apache/hadoop/hive/ql/exec/ExplainTask.java |  19 +-
 .../apache/hadoop/hive/ql/hooks/ATSHook.java    |   4 +-
 .../hadoop/hive/ql/exec/TestExplainTask.java    | 293 ++++++++++++++++++-
 .../hadoop/hive/ql/hooks/TestATSHook.java       |  59 ++++
 9 files changed, 796 insertions(+), 19 deletions(-)
----------------------------------------------------------------------
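
The tests added below share one shape: build an org.json JSONObject from a string literal, hand it to the jsonexplain parser classes, and assert on the parsed structure. A minimal standalone sketch of that round-trip (not part of the commit; the class name is illustrative):

    import org.json.JSONObject;

    public class JsonExplainSketch {
      public static void main(String[] args) {
        // Parse a Tez-style explain fragment, as TestStage does, then read
        // the nested "Vertices:" object back out of it.
        JSONObject stage = new JSONObject("{\"Tez\":{\"Vertices:\":{\"v1\":{}}}}");
        JSONObject vertices = stage.getJSONObject("Tez").getJSONObject("Vertices:");
        System.out.println(vertices.has("v1")); // prints: true
      }
    }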


http://git-wip-us.apache.org/repos/asf/hive/blob/2fa4dc27/common/src/java/org/apache/hadoop/hive/common/jsonexplain/Op.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/jsonexplain/Op.java b/common/src/java/org/apache/hadoop/hive/common/jsonexplain/Op.java
index 39c44f1..e9eb5a7 100644
--- a/common/src/java/org/apache/hadoop/hive/common/jsonexplain/Op.java
+++ b/common/src/java/org/apache/hadoop/hive/common/jsonexplain/Op.java
@@ -29,6 +29,7 @@ import java.util.Set;
 
 import org.apache.hadoop.hive.common.jsonexplain.Vertex.VertexType;
 import org.json.JSONArray;
+import com.google.common.annotations.VisibleForTesting;
 import org.json.JSONException;
 import org.json.JSONObject;
 
@@ -85,7 +86,8 @@ public final class Op {
     }
   }
 
-  private void inlineJoinOp() throws Exception {
+  @VisibleForTesting
+  void inlineJoinOp() throws Exception {
     // inline map join operator
     if (this.type == OpType.MAPJOIN) {
       // get the map for posToVertex
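
The change above widens inlineJoinOp() from private to package-private so the new TestOp, which lives in the same package, can call it directly; Guava's @VisibleForTesting records that the wider visibility exists only for tests. The same pattern, sketched on a hypothetical class:

    import com.google.common.annotations.VisibleForTesting;

    public final class Example {
      // Package-private rather than private: callers in other packages still
      // cannot see this method, but a same-package test can invoke it. The
      // annotation is documentation only; the compiler does not enforce it.
      @VisibleForTesting
      void helper() {
      }
    }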

http://git-wip-us.apache.org/repos/asf/hive/blob/2fa4dc27/common/src/test/org/apache/hadoop/hive/common/jsonexplain/TestOp.java
----------------------------------------------------------------------
diff --git a/common/src/test/org/apache/hadoop/hive/common/jsonexplain/TestOp.java b/common/src/test/org/apache/hadoop/hive/common/jsonexplain/TestOp.java
new file mode 100644
index 0000000..eb5dca4
--- /dev/null
+++ b/common/src/test/org/apache/hadoop/hive/common/jsonexplain/TestOp.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common.jsonexplain;
+
+import org.apache.hadoop.hive.common.jsonexplain.tez.TezJsonParser;
+import org.json.JSONObject;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.*;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestOp {
+
+  private TezJsonParser tezJsonParser;
+
+  @Before
+  public void setUp() throws Exception {
+    this.tezJsonParser = new TezJsonParser();
+  }
+
+
+  @Test
+  public void testInlineJoinOpJsonHandling() throws Exception {
+    String jsonString = "{" +
+            "\"input vertices:\":{\"a\":\"AVERTEX\"}," + "\"condition map:\": [" +
+            "{\"c1\": \"{\\\"type\\\": \\\"type\\\", \\\"left\\\": \\\"left\\\", " +
+            "\\\"right\\\": \\\"right\\\"}\"}]," +
+            "\"keys:\":{\"left\":\"AKEY\", \"right\":\"BKEY\"}}";
+    JSONObject mapJoin = new JSONObject(jsonString);
+
+    Vertex vertexB = new Vertex("vertex-b", null, null, tezJsonParser);
+    Op dummyOp = new Op("Dummy Op", "dummy-id", "output-vertex-name", null, Collections.emptyList(),
+            null, mapJoin, null, tezJsonParser);
+    vertexB.outputOps.add(dummyOp);
+
+    Vertex vertexC = new Vertex("vertex-c", null, null, tezJsonParser);
+    vertexC.outputOps.add(dummyOp);
+
+
+    Vertex vertexA = new Vertex("vertex-a", null, null, tezJsonParser);
+    vertexA.tagToInput = new HashMap<>();
+    vertexA.tagToInput.put("left", "vertex-b");
+    vertexA.tagToInput.put("right", "vertex-c");
+    vertexA.parentConnections.add(new Connection("left", vertexB));
+    vertexA.parentConnections.add(new Connection("right", vertexC));
+
+
+    Map<String, String> attrs = new HashMap<>();
+
+    Op uut = new Op("Map Join Operator", "op-id", "output-vertex-name", null, Collections.emptyList(),
+            attrs, mapJoin, vertexA, tezJsonParser);
+    uut.inlineJoinOp();
+
+    assertEquals(1, attrs.size());
+
+    String result = attrs.get("Conds:");
+    String expected = "dummy-id.AKEY=dummy-id.BKEY(type)";
+    assertEquals(expected, result);
+  }
+}
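
For readability, the doubly escaped jsonString in testInlineJoinOpJsonHandling above decodes to the JSON below (unescaped by hand; the value of "c1" is itself a JSON-encoded string, which is why one level of escaping remains):

    {
      "input vertices:": {"a": "AVERTEX"},
      "condition map:": [
        {"c1": "{\"type\": \"type\", \"left\": \"left\", \"right\": \"right\"}"}
      ],
      "keys:": {"left": "AKEY", "right": "BKEY"}
    }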

http://git-wip-us.apache.org/repos/asf/hive/blob/2fa4dc27/common/src/test/org/apache/hadoop/hive/common/jsonexplain/TestStage.java
----------------------------------------------------------------------
diff --git a/common/src/test/org/apache/hadoop/hive/common/jsonexplain/TestStage.java b/common/src/test/org/apache/hadoop/hive/common/jsonexplain/TestStage.java
new file mode 100644
index 0000000..e344eb3
--- /dev/null
+++ b/common/src/test/org/apache/hadoop/hive/common/jsonexplain/TestStage.java
@@ -0,0 +1,194 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common.jsonexplain;
+
+import org.apache.hadoop.hive.common.jsonexplain.tez.TezJsonParser;
+import org.json.JSONObject;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import static org.junit.Assert.*;
+
+public class TestStage {
+
+  private Stage uut;
+  private Stage stageA;
+  private Stage stageB;
+  private TezJsonParser tezJsonParser;
+
+  @Before
+  public void setUp() {
+    this.tezJsonParser = new TezJsonParser();
+    this.uut = new Stage("uut", tezJsonParser);
+    this.stageA = new Stage("stage-a", tezJsonParser);
+    this.stageB = new Stage("stage-b", tezJsonParser);
+  }
+
+  @Test
+  public void testAddDependencyNonRoot() throws Exception {
+    Map<String, Stage> children = new LinkedHashMap<>();
+    children.put("a", stageA);
+    children.put("b", stageB);
+
+
+    String jsonString = "{\"DEPENDENT STAGES\":\"a,b\"}";
+    JSONObject names = new JSONObject(jsonString);
+
+    uut.addDependency(names, children);
+
+    assertEquals(2, uut.parentStages.size());
+    assertEquals(stageA, uut.parentStages.get(0));
+    assertEquals(stageB, uut.parentStages.get(1));
+
+    assertEquals(1, stageA.childStages.size());
+    assertEquals(uut, stageA.childStages.get(0));
+
+    assertEquals(1, stageB.childStages.size());
+    assertEquals(uut, stageB.childStages.get(0));
+  }
+
+  @Test
+  public void testAddDependencyRoot() throws Exception {
+    Map<String, Stage> children = new LinkedHashMap<>();
+    children.put("a", stageA);
+    children.put("b", stageB);
+
+    String jsonString = "{\"ROOT STAGE\":\"X\",\"DEPENDENT STAGES\":\"a,b\"}";
+    JSONObject names = new JSONObject(jsonString);
+
+    uut.addDependency(names, children);
+
+    assertEquals(2, uut.parentStages.size());
+    assertEquals(1, stageA.childStages.size());
+    assertEquals(1, stageB.childStages.size());
+  }
+
+
+  @Test
+  public void testExtractVertexNonTez() throws Exception {
+    String jsonString = "{\"OperatorName\":{\"a\":\"A\",\"b\":\"B\"}," +
+            "\"attr1\":\"ATTR1\"}";
+    JSONObject object = new JSONObject(jsonString);
+
+    uut.extractVertex(object);
+
+    assertEquals("OperatorName", uut.op.name);
+    assertEquals(1, uut.attrs.size());
+    assertEquals("ATTR1", uut.attrs.get("attr1"));
+  }
+
+  @Test
+  public void testExtractVertexTezNoEdges() throws Exception {
+    String jsonString = "{\"Tez\":{\"a\":\"A\",\"Vertices:\":{\"v1\":{}}}}";
+    JSONObject object = new JSONObject(jsonString);
+    uut.extractVertex(object);
+
+    assertEquals(1, uut.vertexs.size());
+    assertTrue(uut.vertexs.containsKey("v1"));
+  }
+
+  @Test
+  public void testExtractVertexTezWithOneEdge() throws Exception {
+    String jsonString = "{\"Tez\":{\"a\":\"A\"," +
+            "\"Vertices:\":{\"v1\":{},\"v2\":{}}," +
+            "\"Edges:\":{\"v2\":{\"parent\":\"v1\",\"type\":\"TYPE\"}}}}";
+    JSONObject object = new JSONObject(jsonString);
+    uut.extractVertex(object);
+
+    assertEquals(2, uut.vertexs.size());
+    assertTrue(uut.vertexs.containsKey("v1"));
+    assertTrue(uut.vertexs.containsKey("v2"));
+
+    assertEquals(0, uut.vertexs.get("v1").parentConnections.size());
+    assertEquals(1, uut.vertexs.get("v2").parentConnections.size());
+    assertEquals("v1", uut.vertexs.get("v2").parentConnections.get(0).from.name);
+    assertEquals("TYPE", uut.vertexs.get("v2").parentConnections.get(0).type);
+
+  }
+
+
+  @Test
+  public void testExtractVertexTezWithOneToManyEdge() throws Exception {
+    String jsonString = "{\"Tez\":{\"a\":\"A\"," +
+            "\"Vertices:\":{\"v1\":{},\"v2\":{},\"v3\":{}}," +
+            "\"Edges:\":{\"v1\":[{\"parent\":\"v2\",\"type\":\"TYPE1\"}," +
+            "{\"parent\":\"v3\",\"type\":\"TYPE2\"}]}}}";
+    JSONObject object = new JSONObject(jsonString);
+
+    uut.extractVertex(object);
+
+    assertEquals(3, uut.vertexs.size());
+    assertTrue(uut.vertexs.containsKey("v1"));
+    assertTrue(uut.vertexs.containsKey("v2"));
+    assertTrue(uut.vertexs.containsKey("v3"));
+
+    assertEquals(2, uut.vertexs.get("v1").parentConnections.size());
+    assertEquals(1, uut.vertexs.get("v2").children.size());
+    assertEquals(1, uut.vertexs.get("v3").children.size());
+    assertEquals("v1", uut.vertexs.get("v2").children.get(0).name);
+    assertEquals("v1", uut.vertexs.get("v3").children.get(0).name);
+    assertEquals("TYPE1", uut.vertexs.get("v1").parentConnections.get(0).type);
+    assertEquals("TYPE2", uut.vertexs.get("v1").parentConnections.get(1).type);
+
+  }
+
+  @Test
+  public void testExtractOpEmptyObject() throws Exception {
+    JSONObject object = new JSONObject();
+    Op result = uut.extractOp("op-name", object);
+
+    assertEquals("op-name", result.name);
+    assertEquals(0, result.attrs.size());
+    assertNull(result.vertex);
+  }
+
+  @Test
+  public void testExtractOpSimple() throws Exception {
+    String jsonString = "{\"a\":\"A\",\"b\":\"B\"}";
+    JSONObject object = new JSONObject(jsonString);
+
+    Op result = uut.extractOp("op-name", object);
+
+    assertEquals("op-name", result.name);
+    assertEquals(2, result.attrs.size());
+    assertNull(result.vertex);
+  }
+
+  @Test
+  public void testExtract() throws Exception {
+    String jsonString = "{\"b\":{\"b2\":\"B2\",\"b1\":\"B1\"}," +
+            "\"Processor Tree:\":{\"a1\":{\"t1\":\"T1\"}}}";
+    JSONObject object = new JSONObject(jsonString);
+
+    Op result = uut.extractOp("op-name", object);
+    assertEquals("op-name", result.name);
+    assertEquals(2, result.attrs.size());
+
+    assertEquals("B1", result.attrs.get("b1"));
+    assertEquals("B2", result.attrs.get("b2"));
+    assertNotNull(result.vertex);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/2fa4dc27/common/src/test/org/apache/hadoop/hive/common/jsonexplain/TestVertex.java
----------------------------------------------------------------------
diff --git a/common/src/test/org/apache/hadoop/hive/common/jsonexplain/TestVertex.java b/common/src/test/org/apache/hadoop/hive/common/jsonexplain/TestVertex.java
new file mode 100644
index 0000000..4303be7
--- /dev/null
+++ b/common/src/test/org/apache/hadoop/hive/common/jsonexplain/TestVertex.java
@@ -0,0 +1,108 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common.jsonexplain;
+
+import org.apache.hadoop.hive.common.jsonexplain.tez.TezJsonParser;
+import org.json.JSONObject;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+public class TestVertex {
+
+  private TezJsonParser tezJsonParser;
+
+  @Before
+  public void setUp() throws Exception {
+    this.tezJsonParser = new TezJsonParser();
+  }
+
+
+  @Test
+  public void testExtractOpTree() throws Exception {
+    JSONObject object = new JSONObject("{\"Join:\":[{},{}]}");
+
+    Vertex uut = new Vertex("name", object, null, tezJsonParser);
+    uut.extractOpTree();
+
+    assertEquals(2, uut.mergeJoinDummyVertexs.size());
+  }
+
+  @Test
+  public void testExtractOpNonJsonChildrenShouldThrow() throws Exception {
+    String jsonString = "{\"opName\":{\"children\":\"not-json\"}}";
+    JSONObject operator = new JSONObject(jsonString);
+
+    Vertex uut = new Vertex("name", null, null, tezJsonParser);
+
+    try {
+      uut.extractOp(operator, null);
+      fail("Expected extractOp to throw on non-JSON children");
+    } catch (Exception e) {
+      assertEquals("Unsupported operator name's children operator is neither a jsonobject nor a jsonarray", e.getMessage());
+    }
+  }
+
+  @Test
+  public void testExtractOpNoChildrenOperatorId() throws Exception {
+    String jsonString = "{\"opName\":{\"OperatorId:\":\"operator-id\"}}";
+    JSONObject operator = new JSONObject(jsonString);
+
+    Vertex uut = new Vertex("name", null, null, tezJsonParser);
+
+    Op result = uut.extractOp(operator, null);
+    assertEquals("opName", result.name);
+    assertEquals("operator-id", result.operatorId);
+    assertEquals(0, result.children.size());
+    assertEquals(0, result.attrs.size());
+  }
+
+  @Test
+  public void testExtractOpOneChild() throws Exception {
+    String jsonString = "{\"opName\":{\"children\":{\"childName\":" +
+            "{\"OperatorId:\":\"child-operator-id\"}}}}";
+    JSONObject operator = new JSONObject(jsonString);
+
+    Vertex uut = new Vertex("name", null, null, tezJsonParser);
+
+    Op result = uut.extractOp(operator, null);
+    assertEquals("opName", result.name);
+    assertEquals(1, result.children.size());
+    assertEquals("childName", result.children.get(0).name);
+    assertEquals("child-operator-id", result.children.get(0).operatorId);
+  }
+
+  @Test
+  public void testExtractOpMultipleChildren() throws Exception {
+    String jsonString = "{\"opName\":{\"children\":[" +
+            "{\"childName1\":{\"OperatorId:\":\"child-operator-id1\"}}," +
+            "{\"childName2\":{\"OperatorId:\":\"child-operator-id2\"}}]}}";
+    JSONObject operator = new JSONObject(jsonString);
+
+    Vertex uut = new Vertex("name", null, null, tezJsonParser);
+
+    Op result = uut.extractOp(operator, null);
+    assertEquals("opName", result.name);
+    assertEquals(2, result.children.size());
+    assertEquals("childName1", result.children.get(0).name);
+    assertEquals("child-operator-id1", result.children.get(0).operatorId);
+    assertEquals("childName2", result.children.get(1).name);
+    assertEquals("child-operator-id2", result.children.get(1).operatorId);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/2fa4dc27/common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestTezJsonParser.java
----------------------------------------------------------------------
diff --git a/common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestTezJsonParser.java b/common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestTezJsonParser.java
new file mode 100644
index 0000000..cf34ab8
--- /dev/null
+++ b/common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestTezJsonParser.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common.jsonexplain.tez;
+
+import org.json.JSONObject;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestTezJsonParser {
+
+  private TezJsonParser uut;
+
+  @Before
+  public void setUp() throws Exception {
+    this.uut = new TezJsonParser();
+  }
+
+  @Test
+  public void testExtractStagesAndPlans() throws Exception {
+    String jsonString = "{\"STAGE DEPENDENCIES\":{\"s1\":{\"ROOT STAGE\":\"\"}," +
+            "\"s2\":{\"DEPENDENT STAGES\":\"s1\"}},\"STAGE PLANS\":{}}";
+    JSONObject input = new JSONObject(jsonString);
+
+    uut.extractStagesAndPlans(input);
+
+    assertEquals(2, uut.stages.size());
+    assertEquals(1, uut.stages.get("s1").childStages.size());
+    assertEquals("s2", uut.stages.get("s1").childStages.get(0).internalName);
+    assertEquals(0, uut.stages.get("s2").childStages.size());
+    assertEquals(0, uut.stages.get("s1").parentStages.size());
+    assertEquals(1, uut.stages.get("s2").parentStages.size());
+    assertEquals("s1", uut.stages.get("s2").parentStages.get(0).internalName);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/2fa4dc27/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
index 8ddb8d6..902664d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
@@ -42,6 +42,7 @@ import java.util.Map.Entry;
 import java.util.Set;
 import java.util.TreeMap;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.ObjectPair;
 import org.apache.hadoop.hive.common.jsonexplain.JsonParser;
@@ -132,7 +133,8 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
    * {"input_tables":[{"tablename": "default@test_sambavi_v1", "tabletype": "TABLE"}],
    *  "input partitions":["default@srcpart@ds=2008-04-08/hr=11"]}
    */
-  private static JSONObject getJSONDependencies(ExplainWork work)
+  @VisibleForTesting
+  static JSONObject getJSONDependencies(ExplainWork work)
       throws Exception {
     assert(work.getDependency());
 
@@ -202,7 +204,8 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
   private static String falseCondNameVectorizationEnabled =
       HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED.varname + " IS false";
 
-  private ImmutablePair<Boolean, JSONObject> outputPlanVectorization(PrintStream out, boolean jsonOutput)
+  @VisibleForTesting
+  ImmutablePair<Boolean, JSONObject> outputPlanVectorization(PrintStream out, boolean jsonOutput)
       throws Exception {
 
     if (out != null) {
@@ -422,7 +425,8 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
     }
   }
 
-  private JSONObject collectAuthRelatedEntities(PrintStream out, ExplainWork work)
+  @VisibleForTesting
+  JSONObject collectAuthRelatedEntities(PrintStream out, ExplainWork work)
       throws Exception {
 
     BaseSemanticAnalyzer analyzer = work.getAnalyzer();
@@ -486,7 +490,8 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
     return sb.toString();
   }
 
-  private JSONObject outputMap(Map<?, ?> mp, boolean hasHeader, PrintStream out,
+  @VisibleForTesting
+  JSONObject outputMap(Map<?, ?> mp, boolean hasHeader, PrintStream out,
       boolean extended, boolean jsonOutput, int indent) throws Exception {
 
     TreeMap<Object, Object> tree = getBasictypeKeyedMap(mp);
@@ -696,7 +701,8 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
     return outputPlan(work, out, extended, jsonOutput, indent, "");
   }
 
-  private JSONObject outputPlan(Object work, PrintStream out,
+  @VisibleForTesting
+  JSONObject outputPlan(Object work, PrintStream out,
       boolean extended, boolean jsonOutput, int indent, String appendToHeader) throws Exception {
     // Check if work has an explain annotation
     Annotation note = AnnotationUtils.getAnnotation(work.getClass(), Explain.class);
@@ -1072,7 +1078,8 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
     return null;
   }
 
-  private JSONObject outputDependencies(Task<?> task,
+  @VisibleForTesting
+  JSONObject outputDependencies(Task<?> task,
       PrintStream out, JSONObject parentJson, boolean jsonOutput, boolean taskType, int indent)
       throws Exception {
 

http://git-wip-us.apache.org/repos/asf/hive/blob/2fa4dc27/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
index f44661e..84f992a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
@@ -32,6 +32,7 @@ import java.util.concurrent.ThreadFactory;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.llap.registry.impl.LlapRegistryService;
@@ -78,7 +79,8 @@ public class ATSHook implements ExecuteWithHookContext {
   private static boolean defaultATSDomainCreated = false;
   private static final String DEFAULT_ATS_DOMAIN = "hive_default_ats";
 
-  private enum OtherInfoTypes {
+  @VisibleForTesting
+  enum OtherInfoTypes {
     QUERY, STATUS, TEZ, MAPRED, INVOKER_INFO, SESSION_ID, THREAD_NAME, VERSION,
     CLIENT_IP_ADDRESS, HIVE_ADDRESS, HIVE_INSTANCE_TYPE, CONF, PERF, LLAP_APP_ID
   };

http://git-wip-us.apache.org/repos/asf/hive/blob/2fa4dc27/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExplainTask.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExplainTask.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExplainTask.java
index 805bc5b..cac1fad 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExplainTask.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExplainTask.java
@@ -18,26 +18,52 @@
 
 package org.apache.hadoop.hive.ql.exec;
 
-import static org.junit.Assert.assertEquals;
-
-import java.io.PrintStream;
-import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.commons.io.output.ByteArrayOutputStream;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.hooks.Entity;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.ExplainConfiguration;
 import org.apache.hadoop.hive.ql.parse.ParseContext;
-import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.*;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
-import org.apache.hadoop.hive.ql.plan.ExplainWork;
-import org.apache.hadoop.hive.ql.plan.TableScanDesc;
-import org.junit.Ignore;
+import org.apache.hadoop.hive.ql.plan.api.StageType;
+import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.junit.Before;
 import org.junit.Test;
 
+import java.io.PrintStream;
+import java.util.*;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
 public class TestExplainTask {
 
+  private static final String BACKUP_ID = "backup-id-mock";
+  private static final String AST = "ast-mock";
+
+  private PrintStream out;
+  private ExplainTask uut;
+  private ObjectMapper objectMapper = new ObjectMapper();
+
+  @Before
+  public void setUp() {
+    uut = new ExplainTask();
+    uut.conf = mock(HiveConf.class);
+    out = mock(PrintStream.class);
+  }
+
   public static class DummyExplainDesc<K, V> extends TableScanDesc {
     private static final long serialVersionUID = 1L;
     private Map<K, V> explainResult;
@@ -139,4 +165,249 @@ public class TestExplainTask {
     return baos.toString();
   }
 
+  @Test
+  public void testGetJSONDependenciesJsonShouldMatch() throws Exception {
+    ExplainWork work = mockExplainWork();
+
+    when(work.getDependency()).thenReturn(true);
+
+    // Mock inputs
+    HashSet<ReadEntity> inputs = new HashSet<>();
+
+    // One input table
+    Table table = mock(Table.class);
+    when(table.getCompleteName()).thenReturn("table-name-mock");
+    when(table.getTableType()).thenReturn(TableType.EXTERNAL_TABLE);
+    ReadEntity input1 = mock(ReadEntity.class);
+    when(input1.getType()).thenReturn(Entity.Type.TABLE);
+    when(input1.getTable()).thenReturn(table);
+    inputs.add(input1);
+
+    // And one partition
+    Partition partition = mock(Partition.class);
+    when(partition.getCompleteName()).thenReturn("partition-name-mock");
+    ReadEntity input2 = mock(ReadEntity.class);
+    when(input2.getType()).thenReturn(Entity.Type.PARTITION);
+    when(input2.getPartition()).thenReturn(partition);
+    inputs.add(input2);
+
+    when(work.getInputs()).thenReturn(inputs);
+
+    JsonNode result = objectMapper.readTree(ExplainTask.getJSONDependencies(work).toString());
+    JsonNode expected = objectMapper.readTree("{\"input_partitions\":[{\"partitionName\":" +
+            "\"partition-name-mock\"}],\"input_tables\":[{\"tablename\":\"table-name-mock\"," +
+            "\"tabletype\":\"EXTERNAL_TABLE\"}]}");
+
+    assertEquals(expected, result);
+  }
+
+  @Test
+  public void testGetJSONPlan() throws Exception {
+    when(uut.conf.getVar(HiveConf.ConfVars.HIVESTAGEIDREARRANGE)).thenReturn("EXECUTION");
+    Task mockTask = mockTask();
+    when(mockTask.getId()).thenReturn("mockTaskId");
+    ExplainWork explainWorkMock = mockExplainWork();
+    when(mockTask.getWork()).thenReturn(explainWorkMock);
+    List<Task<?>> tasks = Arrays.<Task<?>>asList(mockTask);
+
+
+    JsonNode result = objectMapper.readTree(uut.getJSONPlan(null, tasks, null, true,
+            false, false).toString());
+    JsonNode expected = objectMapper.readTree("{\"STAGE DEPENDENCIES\":{\"mockTaskId\":" +
+            "{\"ROOT STAGE\":\"TRUE\",\"BACKUP STAGE\":\"backup-id-mock\"}},\"STAGE PLANS\":" +
+            "{\"mockTaskId\":{}}}");
+
+    assertEquals(expected, result);
+  }
+
+  @Test
+  public void testOutputDependenciesJsonShouldMatch() throws Exception {
+    Task<? extends ExplainTask> task = mockTask();
+
+    JsonNode result = objectMapper.readTree(
+            uut.outputDependencies(task, out, null, true, true, 0).toString());
+    JsonNode expected = objectMapper.readTree("{\"ROOT STAGE\":\"TRUE\",\"BACKUP STAGE\":" +
+            "\""+BACKUP_ID+"\",\"TASK TYPE\":\"EXPLAIN\"}");
+
+    assertEquals(expected, result);
+  }
+
+  @Test
+  public void testGetJSONLogicalPlanJsonShouldMatch() throws Exception {
+    JsonNode result = objectMapper.readTree(
+            uut.getJSONLogicalPlan(null, mockExplainWork()).toString());
+    JsonNode expected = objectMapper.readTree("{}");
+
+    assertEquals(expected, result);
+  }
+
+  @Test
+  public void testOutputMapJsonShouldMatch() throws Exception {
+    Map<Object, Object> map = new LinkedHashMap<>();
+
+    // String
+    map.put("key-1", "value-1");
+
+    // SparkWork
+    map.put("spark-work", new SparkWork("spark-work"));
+
+    // Empty list
+    List<Object> emptyList = Collections.emptyList();
+    map.put("empty-list", emptyList);
+
+    // List of TezWork.Dependency
+    List<Object> tezList1 = new ArrayList<>(Arrays.asList(new Object[] {mockTezWorkDependency()}));
+    map.put("tez-list-1", tezList1);
+    List<Object> tezList2 = new ArrayList<>(
+            Arrays.asList(new Object[] {mockTezWorkDependency(), mockTezWorkDependency()}));
+    map.put("tez-list-2", tezList2);
+
+    // List of SparkWork.Dependency
+    List<Object> sparkList1 = new ArrayList<>(
+            Arrays.asList(new Object[]{mockSparkWorkDependency()}));
+    map.put("spark-list-1", sparkList1);
+    List<Object> sparkList2 = new ArrayList<>(
+            Arrays.asList(new Object[]{mockSparkWorkDependency(), mockSparkWorkDependency()}));
+    map.put("spark-list-2", sparkList2);
+
+    // inner Map
+    Map<Object, Object> innerMap = new LinkedHashMap<>();
+    innerMap.put("inner-key-1", "inner-value-1");
+    innerMap.put("inner-key-2", tezList1);
+    map.put("map-1", innerMap);
+
+    JsonNode result = objectMapper.readTree(
+            uut.outputMap(map, false, null, false, true, 0).toString());
+    JsonNode expected = objectMapper.readTree("{\"key-1\":\"value-1\",\"tez-list-2\":" +
+            "[{\"parent\":\"name\"}," + "{\"parent\":\"name\"}],\"tez-list-1\":" +
+            "{\"parent\":\"name\"},\"empty-list\":\"[]\",\"spark-list-2\":" +
+            "[{\"parent\":\"mock-name\"},{\"parent\":\"mock-name\"}]," +
+            "\"spark-list-1\":{\"parent\":" +
+            "\"mock-name\"}, \"map-1\":\"{inner-key-1=inner-value-1, " +
+            "inner-key-2=[mock-tez-dependency]}\",\"spark-work\":" +
+            "{\"Spark\":{\"DagName:\":\"spark-work:2\"}}}");
+
+    assertEquals(expected, result);
+  }
+
+  @Test
+  public void testOutputPlanJsonShouldMatch() throws Exception {
+    // SparkWork
+    SparkWork work = new SparkWork("spark-work");
+
+    JsonNode result = objectMapper.readTree(
+            uut.outputPlan(work, null, false, true, 0, null).toString());
+    JsonNode expected = objectMapper.readTree("{\"Spark\":{\"DagName:\":\"spark-work:1\"}}");
+    assertEquals(expected, result);
+
+    // Operator with single child
+    CollectOperator parentCollectOperator1 = new CollectOperator();
+    CollectOperator child1 = new CollectOperator();
+    parentCollectOperator1.setChildOperators(new ArrayList<Operator<? extends OperatorDesc>>(
+            Arrays.asList(new CollectOperator[] {child1})));
+    parentCollectOperator1.setConf(new CollectDesc());
+
+    result = objectMapper.readTree(
+            uut.outputPlan(parentCollectOperator1, null, false, true, 0, null).toString());
+    expected = objectMapper.readTree("{\"Collect\":{\"children\":{}}}");
+    assertEquals(expected, result);
+
+    // Operator with 2 children
+    CollectOperator parentCollectOperator2 = new CollectOperator();
+    CollectOperator child2 = new CollectOperator();
+    parentCollectOperator2.setChildOperators(new ArrayList<Operator<? extends OperatorDesc>>(
+            Arrays.asList(new CollectOperator[] {child1, child2})));
+    parentCollectOperator2.setConf(new CollectDesc());
+    result = objectMapper.readTree(
+            uut.outputPlan(parentCollectOperator2, null, false, true, 0, null).toString());
+    expected = objectMapper.readTree("{\"Collect\":{\"children\":[{},{}]}}");
+    assertEquals(expected, result);
+  }
+
+  @Test
+  public void testCollectAuthRelatedEntitiesJsonShouldMatch() throws Exception {
+    QueryState qs = mock(QueryState.class);
+    when(qs.getHiveOperation()).thenReturn(HiveOperation.EXPLAIN);
+    uut.queryState = qs;
+
+    SessionState.start(new HiveConf(ExplainTask.class));
+    // SessionState.get().setCommandType(HiveOperation.EXPLAIN);
+    HiveAuthenticationProvider authenticationProviderMock = mock(HiveAuthenticationProvider.class);
+    when(authenticationProviderMock.getUserName()).thenReturn("test-user");
+    SessionState.get().setAuthenticator(authenticationProviderMock);
+    SessionState.get().setAuthorizer(mock(HiveAuthorizationProvider.class));
+    ExplainWork work = mockExplainWork();
+
+    JsonNode result = objectMapper.readTree(uut.collectAuthRelatedEntities(null, work).toString());
+    JsonNode expected = objectMapper.readTree("{\"CURRENT_USER\":\"test-user\"," +
+            "\"OPERATION\":\"EXPLAIN\",\"INPUTS\":[],\"OUTPUTS\":[]}");
+    assertEquals(expected, result);
+  }
+
+  @Test
+  public void testOutputPlanVectorizationJsonShouldMatch() throws Exception {
+    QueryState qs = mock(QueryState.class);
+    when(qs.getHiveOperation()).thenReturn(HiveOperation.EXPLAIN);
+    HiveConf hiveConf = new HiveConf();
+    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED, true);
+    when(qs.getConf()).thenReturn(hiveConf);
+    uut.queryState = qs;
+
+    JsonNode result = objectMapper.readTree(uut.outputPlanVectorization(null, true).getRight().toString());
+    JsonNode expected = objectMapper.readTree("{\"enabled\":true,\"enabledConditionsMet\":[\"hive.vectorized.execution.enabled IS true\"]}");
+    assertEquals(expected, result);
+
+
+    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED, false);
+    result = objectMapper.readTree(uut.outputPlanVectorization(null, true).getRight().toString());
+    expected = objectMapper.readTree("{\"enabled\":false,\"enabledConditionsNotMet\":[\"hive.vectorized.execution.enabled IS false\"]}");
+    assertEquals(expected, result);
+
+  }
+
+  private TezWork.Dependency mockTezWorkDependency() {
+    TezWork.Dependency dep = mock(TezWork.Dependency.class);
+    when(dep.getName()).thenReturn("name");
+    when(dep.toString()).thenReturn("mock-tez-dependency");
+    return dep;
+  }
+
+  private SparkWork.Dependency mockSparkWorkDependency() {
+    SparkWork.Dependency dep = mock(SparkWork.Dependency.class);
+    when(dep.getName()).thenReturn("mock-name");
+    when(dep.toString()).thenReturn("mock-spark-dependency");
+    return dep;
+  }
+
+  private ExplainWork mockExplainWork() {
+    ExplainWork explainWork = mock(ExplainWork.class);
+
+    // Should produce JSON
+    when(explainWork.isFormatted()).thenReturn(true);
+
+    // Should have some AST
+    // when(explainWork.getAstStringTree()).thenReturn(AST);
+
+    when(explainWork.getAnalyzer()).thenReturn(mock(BaseSemanticAnalyzer.class));
+
+    return explainWork;
+  }
+
+  private Task<ExplainTask> mockTask() {
+    Task<ExplainTask> task = mock(Task.class);
+
+    // Explain type
+    when(task.getType()).thenReturn(StageType.EXPLAIN);
+
+    // This is a root task
+    when(task.isRootTask()).thenReturn(true);
+
+    // Set up backup task
+    Task backupTask = mock(Task.class);
+    when(backupTask.getId()).thenReturn(BACKUP_ID);
+    when(task.getBackupTask()).thenReturn(backupTask);
+
+    return task;
+  }
+
 }
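
TestExplainTask above leans on Mockito's mock() and when(...).thenReturn(...) stubbing. A minimal self-contained sketch of the pattern (not part of the commit; java.util.List stands in for the mocked Hive classes):

    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    import java.util.List;

    public class MockitoSketch {
      public static void main(String[] args) {
        // mock() builds a stand-in object whose methods return defaults;
        // when(...).thenReturn(...) programs a canned answer for one call,
        // which is how the test stubs ExplainWork, Table and Task above.
        @SuppressWarnings("unchecked")
        List<String> stub = (List<String>) mock(List.class);
        when(stub.get(0)).thenReturn("backup-id-mock");
        System.out.println(stub.get(0)); // prints: backup-id-mock
      }
    }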

http://git-wip-us.apache.org/repos/asf/hive/blob/2fa4dc27/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestATSHook.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestATSHook.java b/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestATSHook.java
new file mode 100644
index 0000000..c484062
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestATSHook.java
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.hooks;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.Collections;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestATSHook {
+
+  private ObjectMapper objectMapper = new ObjectMapper();
+  private ATSHook uut;
+
+  @Before
+  public void setUp() {
+    uut = new ATSHook();
+  }
+
+  @Test
+  public void testCreatePreHookEventJsonShouldMatch() throws Exception {
+    TimelineEntity timelineEntity =  uut.createPreHookEvent(
+            "test-query-id", "test-query", new org.json.JSONObject(), 0L,
+            "test-user", "test-request-user", 0, 0, "test-opid",
+            "client-ip-address", "hive-instance-address", "hive-instance-type", "session-id", "log-id",
+            "thread-id", "execution-mode", Collections.<String>emptyList(), Collections.<String>emptyList(),
+            new HiveConf(), null, "domain-id");
+    String resultStr = (String) timelineEntity.getOtherInfo()
+            .get(ATSHook.OtherInfoTypes.QUERY.name());
+
+    JsonNode result = objectMapper.readTree(resultStr);
+    JsonNode expected = objectMapper.readTree("{\"queryText\":\"test-query\"," +
+            "\"queryPlan\":{}}");
+
+    assertEquals(expected, result);
+  }
+}