You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by ae...@apache.org on 2017/09/26 23:20:35 UTC
[02/33] hadoop git commit: HADOOP-14901. Reuse ObjectMapper in Hadoop
Common. Contributed by Hanisha Koneru.
HADOOP-14901. Reuse ObjectMapper in Hadoop Common. Contributed by Hanisha Koneru.
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e1b32e09
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e1b32e09
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e1b32e09
Branch: refs/heads/HDFS-7240
Commit: e1b32e0959dea5f5a40055157476f9320519a618
Parents: 164a063
Author: Anu Engineer <ae...@apache.org>
Authored: Fri Sep 22 15:45:22 2017 -0700
Committer: Anu Engineer <ae...@apache.org>
Committed: Fri Sep 22 15:45:22 2017 -0700
----------------------------------------------------------------------
.../hadoop/crypto/key/kms/KMSClientProvider.java | 7 +++++--
.../java/org/apache/hadoop/ipc/DecayRpcScheduler.java | 9 +++++----
.../src/main/java/org/apache/hadoop/log/Log4Json.java | 5 +++--
.../apache/hadoop/metrics2/MetricsJsonBuilder.java | 14 +++++++++-----
.../delegation/web/DelegationTokenAuthenticator.java | 7 +++++--
.../org/apache/hadoop/util/HttpExceptionUtils.java | 13 +++++++++----
6 files changed, 36 insertions(+), 19 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1b32e09/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
index b882335..a95d7e6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
@@ -77,6 +77,7 @@ import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.CryptoExtension;
import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
@@ -133,6 +134,9 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension,
private final ValueQueue<EncryptedKeyVersion> encKeyVersionQueue;
+ private static final ObjectWriter WRITER =
+ new ObjectMapper().writerWithDefaultPrettyPrinter();
+
private class EncryptedQueueRefiller implements
ValueQueue.QueueRefiller<EncryptedKeyVersion> {
@@ -226,8 +230,7 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension,
private static void writeJson(Object obj, OutputStream os)
throws IOException {
Writer writer = new OutputStreamWriter(os, StandardCharsets.UTF_8);
- ObjectMapper jsonMapper = new ObjectMapper();
- jsonMapper.writerWithDefaultPrettyPrinter().writeValue(writer, obj);
+ WRITER.writeValue(writer, obj);
}
/**
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1b32e09/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java
index 5ae4e8b..a847d11 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java
@@ -36,6 +36,7 @@ import java.util.concurrent.atomic.AtomicReference;
import javax.management.ObjectName;
import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
import com.google.common.base.Preconditions;
import com.google.common.util.concurrent.AtomicDoubleArray;
import org.apache.commons.lang.exception.ExceptionUtils;
@@ -128,6 +129,8 @@ public class DecayRpcScheduler implements RpcScheduler,
public static final Logger LOG =
LoggerFactory.getLogger(DecayRpcScheduler.class);
+ private static final ObjectWriter WRITER = new ObjectMapper().writer();
+
// Track the decayed and raw (no decay) number of calls for each schedulable
// identity from all previous decay windows: idx 0 for decayed call count and
// idx 1 for the raw call count
@@ -909,8 +912,7 @@ public class DecayRpcScheduler implements RpcScheduler,
return "{}";
} else {
try {
- ObjectMapper om = new ObjectMapper();
- return om.writeValueAsString(decisions);
+ return WRITER.writeValueAsString(decisions);
} catch (Exception e) {
return "Error: " + e.getMessage();
}
@@ -919,8 +921,7 @@ public class DecayRpcScheduler implements RpcScheduler,
public String getCallVolumeSummary() {
try {
- ObjectMapper om = new ObjectMapper();
- return om.writeValueAsString(getDecayedCallCounts());
+ return WRITER.writeValueAsString(getDecayedCallCounts());
} catch (Exception e) {
return "Error: " + e.getMessage();
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1b32e09/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/Log4Json.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/Log4Json.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/Log4Json.java
index 337846c..a2bbbfc 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/Log4Json.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/Log4Json.java
@@ -24,6 +24,7 @@ import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.MappingJsonFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.node.ContainerNode;
import org.apache.log4j.Layout;
import org.apache.log4j.helpers.ISO8601DateFormat;
@@ -105,6 +106,7 @@ public class Log4Json extends Layout {
* configuration it must be done in a static intializer block.
*/
private static final JsonFactory factory = new MappingJsonFactory();
+ private static final ObjectReader READER = new ObjectMapper(factory).reader();
public static final String DATE = "date";
public static final String EXCEPTION_CLASS = "exceptionclass";
public static final String LEVEL = "level";
@@ -252,8 +254,7 @@ public class Log4Json extends Layout {
* @throws IOException on any parsing problems
*/
public static ContainerNode parse(String json) throws IOException {
- ObjectMapper mapper = new ObjectMapper(factory);
- JsonNode jsonNode = mapper.readTree(json);
+ JsonNode jsonNode = READER.readTree(json);
if (!(jsonNode instanceof ContainerNode)) {
throw new IOException("Wrong JSON data: " + json);
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1b32e09/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java
index 8e42909..ce6fbe1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java
@@ -18,17 +18,18 @@
package org.apache.hadoop.metrics2;
-import java.io.IOException;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.io.IOException;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
/**
* Build a JSON dump of the metrics.
*
@@ -44,6 +45,9 @@ public class MetricsJsonBuilder extends MetricsRecordBuilder {
private final MetricsCollector parent;
private Map<String, Object> innerMetrics = new LinkedHashMap<>();
+ private static final ObjectWriter WRITER =
+ new ObjectMapper().writer();
+
/**
* Build an instance.
* @param parent parent collector. Unused in this instance; only used for
@@ -116,7 +120,7 @@ public class MetricsJsonBuilder extends MetricsRecordBuilder {
@Override
public String toString() {
try {
- return new ObjectMapper().writeValueAsString(innerMetrics);
+ return WRITER.writeValueAsString(innerMetrics);
} catch (IOException e) {
LOG.warn("Failed to dump to Json.", e);
return ExceptionUtils.getStackTrace(e);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1b32e09/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
index 06932ac..3dba1db 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.security.token.delegation.web;
import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.security.SecurityUtil;
@@ -54,6 +55,9 @@ public abstract class DelegationTokenAuthenticator implements Authenticator {
private static final String CONTENT_TYPE = "Content-Type";
private static final String APPLICATION_JSON_MIME = "application/json";
+ private static final ObjectReader READER =
+ new ObjectMapper().readerFor(Map.class);
+
private static final String HTTP_GET = "GET";
private static final String HTTP_PUT = "PUT";
@@ -316,8 +320,7 @@ public abstract class DelegationTokenAuthenticator implements Authenticator {
if (contentType != null &&
contentType.contains(APPLICATION_JSON_MIME)) {
try {
- ObjectMapper mapper = new ObjectMapper();
- ret = mapper.readValue(conn.getInputStream(), Map.class);
+ ret = READER.readValue(conn.getInputStream());
} catch (Exception ex) {
throw new AuthenticationException(String.format(
"'%s' did not handle the '%s' delegation token operation: %s",
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1b32e09/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HttpExceptionUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HttpExceptionUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HttpExceptionUtils.java
index 52403d8..cdb8112 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HttpExceptionUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HttpExceptionUtils.java
@@ -18,6 +18,8 @@
package org.apache.hadoop.util;
import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
+import com.fasterxml.jackson.databind.ObjectWriter;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -54,6 +56,11 @@ public class HttpExceptionUtils {
private static final String ENTER = System.getProperty("line.separator");
+ private static final ObjectReader READER =
+ new ObjectMapper().readerFor(Map.class);
+ private static final ObjectWriter WRITER =
+ new ObjectMapper().writerWithDefaultPrettyPrinter();
+
/**
* Creates a HTTP servlet response serializing the exception in it as JSON.
*
@@ -74,9 +81,8 @@ public class HttpExceptionUtils {
json.put(ERROR_CLASSNAME_JSON, ex.getClass().getName());
Map<String, Object> jsonResponse = new LinkedHashMap<String, Object>();
jsonResponse.put(ERROR_JSON, json);
- ObjectMapper jsonMapper = new ObjectMapper();
Writer writer = response.getWriter();
- jsonMapper.writerWithDefaultPrettyPrinter().writeValue(writer, jsonResponse);
+ WRITER.writeValue(writer, jsonResponse);
writer.flush();
}
@@ -144,8 +150,7 @@ public class HttpExceptionUtils {
InputStream es = null;
try {
es = conn.getErrorStream();
- ObjectMapper mapper = new ObjectMapper();
- Map json = mapper.readValue(es, Map.class);
+ Map json = READER.readValue(es);
json = (Map) json.get(ERROR_JSON);
String exClass = (String) json.get(ERROR_CLASSNAME_JSON);
String exMsg = (String) json.get(ERROR_MESSAGE_JSON);
---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org