Posted to common-commits@hadoop.apache.org by as...@apache.org on 2016/02/11 09:00:01 UTC

[27/50] hadoop git commit: MAPREDUCE-6626. Reuse ObjectMapper instance in MapReduce. Contributed by Lin Yiqun.

MAPREDUCE-6626. Reuse ObjectMapper instance in MapReduce. Contributed by Lin Yiqun.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a0b1f10a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a0b1f10a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a0b1f10a

Branch: refs/heads/yarn-2877
Commit: a0b1f10a30dc2736cc136f257b0d3bf0140158bb
Parents: fba6e9f
Author: Akira Ajisaka <aa...@apache.org>
Authored: Wed Feb 10 03:03:49 2016 +0900
Committer: Akira Ajisaka <aa...@apache.org>
Committed: Wed Feb 10 03:05:07 2016 +0900

----------------------------------------------------------------------
 hadoop-mapreduce-project/CHANGES.txt                          | 3 +++
 .../hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java   | 6 ++++--
 .../main/java/org/apache/hadoop/mapreduce/JobSubmitter.java   | 7 ++++---
 3 files changed, 11 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a0b1f10a/hadoop-mapreduce-project/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index 6279d19..a6f2444 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -479,6 +479,9 @@ Release 2.8.0 - UNRELEASED
     MAPREDUCE-6473. Job submission can take a long time during Cluster
     initialization (Kuhu Shukla via jlowe)
 
+    MAPREDUCE-6626. Reuse ObjectMapper instance in MapReduce.
+    (Lin Yiqun via aajisaka)
+
   BUG FIXES
 
     MAPREDUCE-6314. TestPipeApplication fails on trunk.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a0b1f10a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
index 63e3333..5690743 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
@@ -73,6 +73,7 @@ import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.codehaus.jackson.JsonNode;
 import org.codehaus.jackson.map.ObjectMapper;
 import org.codehaus.jackson.node.ArrayNode;
+import org.codehaus.jackson.node.JsonNodeFactory;
 import org.codehaus.jackson.node.ObjectNode;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -84,6 +85,8 @@ import com.google.common.annotations.VisibleForTesting;
  */
 public class JobHistoryEventHandler extends AbstractService
     implements EventHandler<JobHistoryEvent> {
+  private static final JsonNodeFactory FACTORY =
+      new ObjectMapper().getNodeFactory();
 
   private final AppContext context;
   private final int startCount;
@@ -1040,8 +1043,7 @@ public class JobHistoryEventHandler extends AbstractService
 
   @Private
   public JsonNode countersToJSON(Counters counters) {
-    ObjectMapper mapper = new ObjectMapper();
-    ArrayNode nodes = mapper.createArrayNode();
+    ArrayNode nodes = FACTORY.arrayNode();
     if (counters != null) {
       for (CounterGroup counterGroup : counters) {
         ObjectNode groupNode = nodes.addObject();

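Constructing a fresh ObjectMapper on every call is comparatively expensive, and the JsonNodeFactory obtained from a mapper is stateless and thread-safe, so a single static instance can be shared as the hunk above does. A minimal stand-alone sketch of the same pattern (class and method names here are illustrative, not from the patch; only the Jackson calls mirror the diff):

import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.node.ArrayNode;
import org.codehaus.jackson.node.JsonNodeFactory;
import org.codehaus.jackson.node.ObjectNode;

public class CountersJsonSketch {
  // One shared node factory; JsonNodeFactory is thread-safe, so no
  // per-call ObjectMapper construction is needed just to build nodes.
  private static final JsonNodeFactory FACTORY =
      new ObjectMapper().getNodeFactory();

  // Illustrative stand-in for countersToJSON(): builds a JSON array of
  // {"name": ..., "value": ...} objects from parallel name/value arrays.
  public static ArrayNode toJson(String[] names, long[] values) {
    ArrayNode nodes = FACTORY.arrayNode();
    for (int i = 0; i < names.length; i++) {
      ObjectNode node = nodes.addObject();
      node.put("name", names[i]);
      node.put("value", values[i]);
    }
    return nodes;
  }
}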
http://git-wip-us.apache.org/repos/asf/hadoop/blob/a0b1f10a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
index a458e2c..18b76a1 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
@@ -63,6 +63,7 @@ import org.apache.hadoop.yarn.api.records.ReservationId;
 import org.codehaus.jackson.JsonParseException;
 import org.codehaus.jackson.map.JsonMappingException;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;
 
 import com.google.common.base.Charsets;
 
@@ -70,6 +71,8 @@ import com.google.common.base.Charsets;
 @InterfaceStability.Unstable
 class JobSubmitter {
   protected static final Log LOG = LogFactory.getLog(JobSubmitter.class);
+  private static final ObjectReader READER =
+      new ObjectMapper().reader(Map.class);
   private static final String SHUFFLE_KEYGEN_ALGORITHM = "HmacSHA1";
   private static final int SHUFFLE_KEY_LENGTH = 64;
   private FileSystem jtFs;
@@ -396,9 +399,7 @@ class JobSubmitter {
       boolean json_error = false;
       try {
         // read JSON
-        ObjectMapper mapper = new ObjectMapper();
-        Map<String, String> nm = 
-          mapper.readValue(new File(localFileName), Map.class);
+        Map<String, String> nm = READER.readValue(new File(localFileName));
 
         for(Map.Entry<String, String> ent: nm.entrySet()) {
           credentials.addSecretKey(new Text(ent.getKey()), ent.getValue()
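Likewise, an ObjectReader pre-configured for a target type is immutable and thread-safe, so one static instance can stand in for the per-call ObjectMapper that the old tokens-file parsing created. A minimal stand-alone sketch (class and method names are illustrative, not from the patch; only the Jackson calls mirror the diff):

import java.io.File;
import java.io.IOException;
import java.util.Map;

import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectReader;

public class SecretsFileSketch {
  // One shared reader bound to Map; ObjectReader instances are immutable
  // and thread-safe, so this replaces building a new ObjectMapper per call.
  private static final ObjectReader READER =
      new ObjectMapper().reader(Map.class);

  // Illustrative stand-in for the JSON-reading step in JobSubmitter:
  // parses a local file containing a flat JSON object of string pairs.
  public static Map<String, String> readSecrets(String localFileName)
      throws IOException {
    return READER.readValue(new File(localFileName));
  }
}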