Posted to common-commits@hadoop.apache.org by st...@apache.org on 2016/10/06 15:31:20 UTC

hadoop git commit: HADOOP-13678 Update jackson from 1.9.13 to 2.x in hadoop-tools. Contributed by Akira Ajisaka.

Repository: hadoop
Updated Branches:
  refs/heads/trunk 4d2f380d7 -> 2cc841f16


HADOOP-13678 Update jackson from 1.9.13 to 2.x in hadoop-tools. Contributed by Akira Ajisaka.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2cc841f1
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2cc841f1
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2cc841f1

Branch: refs/heads/trunk
Commit: 2cc841f16ec9aa5336495fc20ee781a1276fddc5
Parents: 4d2f380
Author: Steve Loughran <st...@apache.org>
Authored: Thu Oct 6 16:30:26 2016 +0100
Committer: Steve Loughran <st...@apache.org>
Committed: Thu Oct 6 16:31:00 2016 +0100

----------------------------------------------------------------------
 hadoop-tools/hadoop-azure-datalake/pom.xml      |  4 +++
 ...ClientCredentialBasedAccesTokenProvider.java |  5 +--
 hadoop-tools/hadoop-azure/pom.xml               |  6 +++-
 .../hadoop/fs/azure/NativeAzureFileSystem.java  | 16 ++++-----
 hadoop-tools/hadoop-openstack/pom.xml           | 18 +++++-----
 .../swift/auth/ApiKeyAuthenticationRequest.java |  2 +-
 .../fs/swift/auth/entities/AccessToken.java     |  2 +-
 .../hadoop/fs/swift/auth/entities/Catalog.java  |  2 +-
 .../hadoop/fs/swift/auth/entities/Endpoint.java |  2 +-
 .../hadoop/fs/swift/auth/entities/Tenant.java   |  2 +-
 .../hadoop/fs/swift/auth/entities/User.java     |  2 +-
 .../snative/SwiftNativeFileSystemStore.java     |  3 +-
 .../apache/hadoop/fs/swift/util/JSONUtil.java   | 24 +++++--------
 hadoop-tools/hadoop-rumen/pom.xml               |  9 +++++
 .../apache/hadoop/tools/rumen/Anonymizer.java   | 23 ++++++-------
 .../hadoop/tools/rumen/HadoopLogsAnalyzer.java  |  3 +-
 .../tools/rumen/JsonObjectMapperParser.java     | 17 ++++-----
 .../tools/rumen/JsonObjectMapperWriter.java     | 21 +++++-------
 .../apache/hadoop/tools/rumen/LoggedJob.java    |  2 +-
 .../hadoop/tools/rumen/LoggedLocation.java      |  2 +-
 .../tools/rumen/LoggedNetworkTopology.java      |  2 +-
 .../rumen/LoggedSingleRelativeRanking.java      |  4 +--
 .../apache/hadoop/tools/rumen/LoggedTask.java   |  2 +-
 .../hadoop/tools/rumen/LoggedTaskAttempt.java   |  2 +-
 .../hadoop/tools/rumen/datatypes/NodeName.java  |  2 +-
 .../rumen/serializers/BlockingSerializer.java   | 10 +++---
 .../DefaultAnonymizingRumenSerializer.java      |  8 ++---
 .../serializers/DefaultRumenSerializer.java     |  9 ++---
 .../serializers/ObjectStringSerializer.java     | 10 +++---
 .../apache/hadoop/tools/rumen/state/State.java  |  2 +-
 .../tools/rumen/state/StateDeserializer.java    | 14 ++++----
 .../hadoop/tools/rumen/state/StatePool.java     | 36 ++++++++------------
 .../hadoop/tools/rumen/TestHistograms.java      | 13 +++----
 hadoop-tools/hadoop-sls/pom.xml                 |  4 +++
 .../hadoop/yarn/sls/RumenToSLSConverter.java    |  8 ++---
 .../org/apache/hadoop/yarn/sls/SLSRunner.java   |  7 ++--
 .../apache/hadoop/yarn/sls/utils/SLSUtils.java  | 10 +++---
 37 files changed, 151 insertions(+), 157 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-azure-datalake/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/pom.xml b/hadoop-tools/hadoop-azure-datalake/pom.xml
index c07a1d7..e1a0bfe 100644
--- a/hadoop-tools/hadoop-azure-datalake/pom.xml
+++ b/hadoop-tools/hadoop-azure-datalake/pom.xml
@@ -181,5 +181,9 @@
       <version>2.4.0</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-databind</artifactId>
+    </dependency>
   </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-azure-datalake/src/main/java/org/apache/hadoop/hdfs/web/oauth2/AzureADClientCredentialBasedAccesTokenProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/main/java/org/apache/hadoop/hdfs/web/oauth2/AzureADClientCredentialBasedAccesTokenProvider.java b/hadoop-tools/hadoop-azure-datalake/src/main/java/org/apache/hadoop/hdfs/web/oauth2/AzureADClientCredentialBasedAccesTokenProvider.java
index 6dfc593..11d07e7 100644
--- a/hadoop-tools/hadoop-azure-datalake/src/main/java/org/apache/hadoop/hdfs/web/oauth2/AzureADClientCredentialBasedAccesTokenProvider.java
+++ b/hadoop-tools/hadoop-azure-datalake/src/main/java/org/apache/hadoop/hdfs/web/oauth2/AzureADClientCredentialBasedAccesTokenProvider.java
@@ -18,6 +18,9 @@
  */
 package org.apache.hadoop.hdfs.web.oauth2;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import com.fasterxml.jackson.databind.ObjectReader;
 import com.squareup.okhttp.OkHttpClient;
 import com.squareup.okhttp.Request;
 import com.squareup.okhttp.RequestBody;
@@ -29,8 +32,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.web.URLConnectionFactory;
 import org.apache.hadoop.util.Timer;
 import org.apache.http.HttpStatus;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.ObjectReader;
 
 import java.io.IOException;
 import java.util.Map;
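
The only change in this file is the import swap: ObjectMapper and ObjectReader now come from com.fasterxml.jackson.databind. For reference, a minimal sketch of the Jackson 2 reader pattern, assuming Jackson 2.6+ for readerFor() (the JSON payload and Map target below are illustrative, not taken from the patch):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.ObjectReader;

    import java.io.IOException;
    import java.util.Map;

    public class ReaderSketch {
      public static void main(String[] args) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        // Jackson 2.6+ spelling; older 2.x releases use the deprecated reader(Class).
        ObjectReader reader = mapper.readerFor(Map.class);
        Map<?, ?> token = reader.readValue("{\"access_token\":\"abc\",\"expires_in\":\"3600\"}");
        System.out.println(token.get("access_token"));
      }
    }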

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-azure/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure/pom.xml b/hadoop-tools/hadoop-azure/pom.xml
index 48f9043..d8121e2 100644
--- a/hadoop-tools/hadoop-azure/pom.xml
+++ b/hadoop-tools/hadoop-azure/pom.xml
@@ -217,6 +217,10 @@
       <artifactId>mockito-all</artifactId>
       <scope>test</scope>
     </dependency>
-    
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-databind</artifactId>
+    </dependency>
+
   </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java
index fb0d31f..54eb90f 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java
@@ -40,6 +40,10 @@ import java.util.concurrent.atomic.AtomicInteger;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.JsonNode;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -63,14 +67,10 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.Time;
-import org.codehaus.jackson.JsonNode;
-import org.codehaus.jackson.JsonParseException;
-import org.codehaus.jackson.JsonParser;
-import org.codehaus.jackson.map.JsonMappingException;
-import org.codehaus.jackson.map.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.annotations.VisibleForTesting;
 import com.microsoft.azure.storage.StorageException;
 
@@ -193,8 +193,8 @@ public class NativeAzureFileSystem extends FileSystem {
       if (oldFolderName == null || newFolderName == null) {
     	  this.committed = false;
       } else {
-        this.srcKey = oldFolderName.getTextValue();
-        this.dstKey = newFolderName.getTextValue();
+        this.srcKey = oldFolderName.textValue();
+        this.dstKey = newFolderName.textValue();
         if (this.srcKey == null || this.dstKey == null) {
           this.committed = false;
         } else {
@@ -203,7 +203,7 @@ public class NativeAzureFileSystem extends FileSystem {
             this.committed = false;
           } else {
             for (int i = 0; i < fileList.size(); i++) {
-              fileStrList.add(fileList.get(i).getTextValue());
+              fileStrList.add(fileList.get(i).textValue());
             }
           }
         }
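
Besides the import moves, this file picks up the Jackson 2 accessor rename: JsonNode.getTextValue() becomes textValue(), which returns the node's text (or null for non-textual nodes). A minimal sketch of the renamed accessor, using an invented JSON layout rather than the rename-metadata format the real code parses:

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;

    import java.io.IOException;

    public class TextValueSketch {
      public static void main(String[] args) throws IOException {
        String json = "{\"oldFolder\":\"a/b\",\"newFolder\":\"a/c\",\"files\":[\"f1\",\"f2\"]}";
        JsonNode root = new ObjectMapper().readTree(json);
        String srcKey = root.get("oldFolder").textValue();   // was getTextValue() in Jackson 1
        String dstKey = root.get("newFolder").textValue();
        JsonNode fileList = root.get("files");
        for (int i = 0; i < fileList.size(); i++) {
          System.out.println(srcKey + " -> " + dstKey + " : " + fileList.get(i).textValue());
        }
      }
    }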

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-openstack/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-openstack/pom.xml b/hadoop-tools/hadoop-openstack/pom.xml
index b036e84..f9a4df0 100644
--- a/hadoop-tools/hadoop-openstack/pom.xml
+++ b/hadoop-tools/hadoop-openstack/pom.xml
@@ -124,16 +124,6 @@
     </dependency>
 
     <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-mapper-asl</artifactId>
-      <scope>compile</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-core-asl</artifactId>
-      <scope>compile</scope>
-    </dependency>
-    <dependency>
       <groupId>commons-httpclient</groupId>
       <artifactId>commons-httpclient</artifactId>
       <version>3.1</version>
@@ -150,5 +140,13 @@
       <artifactId>junit</artifactId>
       <scope>provided</scope>
     </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-annotations</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-databind</artifactId>
+    </dependency>
   </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/ApiKeyAuthenticationRequest.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/ApiKeyAuthenticationRequest.java b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/ApiKeyAuthenticationRequest.java
index f5f9a8c..e25d17d 100644
--- a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/ApiKeyAuthenticationRequest.java
+++ b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/ApiKeyAuthenticationRequest.java
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.fs.swift.auth;
 
-import org.codehaus.jackson.annotate.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonProperty;
 
 /**
  * Class that represents authentication request to Openstack Keystone.
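
This and the following entity classes only swap the annotation package: org.codehaus.jackson.annotate becomes com.fasterxml.jackson.annotation, with the annotation names used in this patch (JsonProperty, JsonIgnoreProperties, JsonAnySetter, JsonIgnore) unchanged. A tiny sketch of the Jackson 2 annotations in use (the field and property names below are invented, not the Keystone schema):

    import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
    import com.fasterxml.jackson.annotation.JsonProperty;
    import com.fasterxml.jackson.databind.ObjectMapper;

    import java.io.IOException;

    @JsonIgnoreProperties(ignoreUnknown = true)
    public class AuthRequestSketch {
      @JsonProperty("apiKeyCredentials")
      public Object credentials;

      public static void main(String[] args) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        AuthRequestSketch req = mapper.readValue(
            "{\"apiKeyCredentials\":{\"apiKey\":\"k\"},\"someUnknownField\":1}",
            AuthRequestSketch.class);
        System.out.println(mapper.writeValueAsString(req));
      }
    }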

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/entities/AccessToken.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/entities/AccessToken.java b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/entities/AccessToken.java
index a01e855..b38d466 100644
--- a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/entities/AccessToken.java
+++ b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/entities/AccessToken.java
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.fs.swift.auth.entities;
 
-import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 
 /**
  * Access token representation of Openstack Keystone authentication.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/entities/Catalog.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/entities/Catalog.java b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/entities/Catalog.java
index 838d87f..76e161b 100644
--- a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/entities/Catalog.java
+++ b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/entities/Catalog.java
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.fs.swift.auth.entities;
 
-import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 
 import java.util.List;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/entities/Endpoint.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/entities/Endpoint.java b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/entities/Endpoint.java
index f9de895..b1cbf2a 100644
--- a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/entities/Endpoint.java
+++ b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/entities/Endpoint.java
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.fs.swift.auth.entities;
 
-import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 
 import java.net.URI;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/entities/Tenant.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/entities/Tenant.java b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/entities/Tenant.java
index da94c40..405d2c8 100644
--- a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/entities/Tenant.java
+++ b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/entities/Tenant.java
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.fs.swift.auth.entities;
 
-import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 
 /**
  * Tenant is abstraction in Openstack which describes all account

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/entities/User.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/entities/User.java b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/entities/User.java
index 1a6954a..da3bac2 100644
--- a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/entities/User.java
+++ b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/auth/entities/User.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.fs.swift.auth.entities;
 
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import org.apache.hadoop.fs.swift.auth.Roles;
-import org.codehaus.jackson.annotate.JsonIgnoreProperties;
 
 import java.util.List;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystemStore.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystemStore.java b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystemStore.java
index cc3e3d2..71d8d82 100644
--- a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystemStore.java
+++ b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystemStore.java
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.fs.swift.snative;
 
+import com.fasterxml.jackson.databind.type.CollectionType;
+
 import org.apache.commons.httpclient.Header;
 import org.apache.commons.httpclient.HttpStatus;
 import org.apache.commons.logging.Log;
@@ -36,7 +38,6 @@ import org.apache.hadoop.fs.swift.util.DurationStats;
 import org.apache.hadoop.fs.swift.util.JSONUtil;
 import org.apache.hadoop.fs.swift.util.SwiftObjectPath;
 import org.apache.hadoop.fs.swift.util.SwiftUtils;
-import org.codehaus.jackson.map.type.CollectionType;
 
 import java.io.ByteArrayInputStream;
 import java.io.FileNotFoundException;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/util/JSONUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/util/JSONUtil.java b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/util/JSONUtil.java
index b17cb65..fee7e7f 100644
--- a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/util/JSONUtil.java
+++ b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/util/JSONUtil.java
@@ -18,12 +18,12 @@
 
 package org.apache.hadoop.fs.swift.util;
 
+import com.fasterxml.jackson.core.JsonGenerationException;
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.type.CollectionType;
 import org.apache.hadoop.fs.swift.exceptions.SwiftJsonMarshallingException;
-import org.codehaus.jackson.JsonGenerationException;
-import org.codehaus.jackson.map.JsonMappingException;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.type.CollectionType;
-import org.codehaus.jackson.type.TypeReference;
 
 import java.io.IOException;
 import java.io.StringWriter;
@@ -54,9 +54,7 @@ public class JSONUtil {
     try {
       jsonMapper.writeValue(json, object);
       return json.toString();
-    } catch (JsonGenerationException e) {
-      throw new SwiftJsonMarshallingException(e.toString(), e);
-    } catch (JsonMappingException e) {
+    } catch (JsonGenerationException | JsonMappingException e) {
       throw new SwiftJsonMarshallingException(e.toString(), e);
     }
   }
@@ -96,9 +94,7 @@ public class JSONUtil {
             throws IOException {
     try {
       return (T)jsonMapper.readValue(value, typeReference);
-    } catch (JsonGenerationException e) {
-      throw new SwiftJsonMarshallingException("Error generating response", e);
-    } catch (JsonMappingException e) {
+    } catch (JsonGenerationException | JsonMappingException e) {
       throw new SwiftJsonMarshallingException("Error generating response", e);
     }
   }
@@ -115,11 +111,7 @@ public class JSONUtil {
               throws IOException {
     try {
       return (T)jsonMapper.readValue(value, collectionType);
-    } catch (JsonGenerationException e) {
-      throw new SwiftJsonMarshallingException(e.toString()
-                                              + " source: " + value,
-                                              e);
-    } catch (JsonMappingException e) {
+    } catch (JsonGenerationException | JsonMappingException e) {
       throw new SwiftJsonMarshallingException(e.toString()
                                               + " source: " + value,
                                               e);
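
The collapsed handlers rely on Java 7 multi-catch; in Jackson 2 both JsonGenerationException and JsonMappingException still extend JsonProcessingException, so the pair can be caught together (or via the common parent). A minimal sketch of the write path, with a plain IOException standing in for SwiftJsonMarshallingException:

    import com.fasterxml.jackson.core.JsonGenerationException;
    import com.fasterxml.jackson.databind.JsonMappingException;
    import com.fasterxml.jackson.databind.ObjectMapper;

    import java.io.IOException;
    import java.io.StringWriter;

    public class MultiCatchSketch {
      private static final ObjectMapper MAPPER = new ObjectMapper();

      public static String toJSON(Object object) throws IOException {
        StringWriter json = new StringWriter();
        try {
          MAPPER.writeValue(json, object);
          return json.toString();
        } catch (JsonGenerationException | JsonMappingException e) {
          // Stand-in for SwiftJsonMarshallingException in the real code.
          throw new IOException(e.toString(), e);
        }
      }

      public static void main(String[] args) throws IOException {
        System.out.println(toJSON(java.util.Collections.singletonMap("key", "value")));
      }
    }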

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-rumen/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/pom.xml b/hadoop-tools/hadoop-rumen/pom.xml
index f5acf4e..9e1b1f3 100644
--- a/hadoop-tools/hadoop-rumen/pom.xml
+++ b/hadoop-tools/hadoop-rumen/pom.xml
@@ -94,6 +94,15 @@
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-databind</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-databind</artifactId>
+    </dependency>
   </dependencies>
 
   <build>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Anonymizer.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Anonymizer.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Anonymizer.java
index e1a2be7..3c85a93 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Anonymizer.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Anonymizer.java
@@ -21,6 +21,12 @@ package org.apache.hadoop.tools.rumen;
 import java.io.IOException;
 import java.io.OutputStream;
 
+import com.fasterxml.jackson.core.JsonEncoding;
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.Version;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.module.SimpleModule;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
@@ -36,13 +42,6 @@ import org.apache.hadoop.tools.rumen.datatypes.*;
 import org.apache.hadoop.tools.rumen.serializers.*;
 import org.apache.hadoop.tools.rumen.state.*;
 
-import org.codehaus.jackson.JsonEncoding;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.JsonGenerator;
-import org.codehaus.jackson.Version;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.module.SimpleModule;
-
 public class Anonymizer extends Configured implements Tool {
   private boolean anonymizeTrace = false;
   private Path inputTracePath = null;
@@ -88,8 +87,8 @@ public class Anonymizer extends Configured implements Tool {
      
     outMapper = new ObjectMapper();
     // define a module
-    SimpleModule module = new SimpleModule("Anonymization Serializer",  
-                                           new Version(0, 1, 1, "FINAL"));
+    SimpleModule module = new SimpleModule(
+        "Anonymization Serializer", new Version(0, 1, 1, "FINAL", "", ""));
     // add various serializers to the module
     // use the default (as-is) serializer for default data types
     module.addSerializer(DataType.class, new DefaultRumenSerializer());
@@ -106,7 +105,7 @@ public class Anonymizer extends Configured implements Tool {
     // register the module with the object-mapper
     outMapper.registerModule(module);
     
-    outFactory = outMapper.getJsonFactory();
+    outFactory = outMapper.getFactory();
   }
   
   // anonymize the job trace file
@@ -191,8 +190,8 @@ public class Anonymizer extends Configured implements Tool {
       output = outFS.create(path);
     }
 
-    JsonGenerator outGen = outFactory.createJsonGenerator(output, 
-                                                          JsonEncoding.UTF8);
+    JsonGenerator outGen =
+        outFactory.createGenerator(output, JsonEncoding.UTF8);
     outGen.useDefaultPrettyPrinter();
     
     return outGen;
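
Two Jackson 2 signature changes show up here: com.fasterxml.jackson.core.Version takes six arguments (major, minor, patch, snapshot info, group id, artifact id), and the factory/generator accessors drop the "Json" prefix (getFactory(), createGenerator()). A condensed sketch of the same wiring, with a throwaway CharSequence serializer standing in for the Rumen serializers:

    import com.fasterxml.jackson.core.JsonEncoding;
    import com.fasterxml.jackson.core.JsonGenerator;
    import com.fasterxml.jackson.core.Version;
    import com.fasterxml.jackson.databind.JsonSerializer;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.SerializerProvider;
    import com.fasterxml.jackson.databind.module.SimpleModule;

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;

    public class ModuleWiringSketch {
      public static void main(String[] args) throws IOException {
        ObjectMapper outMapper = new ObjectMapper();
        // Jackson 2 Version: major, minor, patch, snapshotInfo, groupId, artifactId.
        SimpleModule module = new SimpleModule(
            "Anonymization Serializer", new Version(0, 1, 1, "FINAL", "", ""));
        // Illustrative serializer; the patch registers Rumen's own serializers here.
        module.addSerializer(CharSequence.class, new JsonSerializer<CharSequence>() {
          @Override
          public void serialize(CharSequence value, JsonGenerator gen,
              SerializerProvider provider) throws IOException {
            gen.writeString(value.toString());
          }
        });
        outMapper.registerModule(module);

        ByteArrayOutputStream output = new ByteArrayOutputStream();
        // getFactory()/createGenerator() replace getJsonFactory()/createJsonGenerator().
        JsonGenerator outGen = outMapper.getFactory()
            .createGenerator(output, JsonEncoding.UTF8);
        outGen.useDefaultPrettyPrinter();
        outGen.writeObject("sample-value");
        outGen.close();
        System.out.println(output.toString("UTF-8"));
      }
    }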

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
index c53a7c2..eceb98d 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
@@ -35,6 +35,7 @@ import java.util.List;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import com.fasterxml.jackson.core.JsonProcessingException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
@@ -57,8 +58,6 @@ import org.apache.hadoop.io.compress.CompressionCodecFactory;
 import org.apache.hadoop.io.compress.CodecPool;
 import org.apache.hadoop.io.compress.Decompressor;
 
-import org.codehaus.jackson.JsonProcessingException;
-
 /**
  * This is the main class for rumen log mining functionality.
  * 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperParser.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperParser.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperParser.java
index cbd3679..f95878d 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperParser.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperParser.java
@@ -18,15 +18,14 @@
 package org.apache.hadoop.tools.rumen;
 
 import java.io.Closeable;
-import java.io.EOFException;
 import java.io.IOException;
 import java.io.InputStream;
 
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.codehaus.jackson.JsonParser;
-import org.codehaus.jackson.map.DeserializationConfig;
-import org.codehaus.jackson.map.ObjectMapper;
 
 /**
  * A simple wrapper for parsing JSON-encoded data using ObjectMapper.
@@ -50,11 +49,9 @@ class JsonObjectMapperParser<T> implements Closeable {
   public JsonObjectMapperParser(Path path, Class<? extends T> clazz,
       Configuration conf) throws IOException {
     mapper = new ObjectMapper();
-    mapper.configure(
-        DeserializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true);
     this.clazz = clazz;
     InputStream input = new PossiblyDecompressedInputStream(path, conf);
-    jsonParser = mapper.getJsonFactory().createJsonParser(input);
+    jsonParser = mapper.getFactory().createParser(input);
   }
 
   /**
@@ -66,10 +63,8 @@ class JsonObjectMapperParser<T> implements Closeable {
   public JsonObjectMapperParser(InputStream input, Class<? extends T> clazz)
       throws IOException {
     mapper = new ObjectMapper();
-    mapper.configure(
-        DeserializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true);
     this.clazz = clazz;
-    jsonParser = mapper.getJsonFactory().createJsonParser(input);
+    jsonParser = mapper.getFactory().createParser(input);
   }
 
   /**
@@ -82,7 +77,7 @@ class JsonObjectMapperParser<T> implements Closeable {
   public T getNext() throws IOException {
     try {
       return mapper.readValue(jsonParser, clazz);
-    } catch (EOFException e) {
+    } catch (JsonMappingException e) {
       return null;
     }
   }
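
Besides createParser() replacing createJsonParser(), the CAN_OVERRIDE_ACCESS_MODIFIERS configure call is simply dropped: the Jackson 2 counterpart (MapperFeature.CAN_OVERRIDE_ACCESS_MODIFIERS) is enabled by default. End of input is now surfaced as a JsonMappingException rather than an EOFException, which is why the catch in getNext() changed. A rough sketch of that read loop, with a hypothetical Record POJO in place of the Rumen logged types:

    import com.fasterxml.jackson.core.JsonParser;
    import com.fasterxml.jackson.databind.JsonMappingException;
    import com.fasterxml.jackson.databind.ObjectMapper;

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    public class ParserLoopSketch {
      // Hypothetical POJO standing in for LoggedJob/LoggedTask and friends.
      public static class Record {
        public String name;
      }

      public static void main(String[] args) throws IOException {
        InputStream input = new ByteArrayInputStream(
            "{\"name\":\"a\"} {\"name\":\"b\"}".getBytes("UTF-8"));
        ObjectMapper mapper = new ObjectMapper();
        JsonParser jsonParser = mapper.getFactory().createParser(input);
        while (true) {
          Record next;
          try {
            next = mapper.readValue(jsonParser, Record.class);
          } catch (JsonMappingException e) {
            break;   // end of input, mirroring getNext() returning null
          }
          System.out.println(next.name);
        }
        jsonParser.close();
      }
    }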

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperWriter.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperWriter.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperWriter.java
index 47bfee0..747b141 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperWriter.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperWriter.java
@@ -21,16 +21,15 @@ import java.io.Closeable;
 import java.io.IOException;
 import java.io.OutputStream;
 
+import com.fasterxml.jackson.core.JsonEncoding;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.Version;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.module.SimpleModule;
 import org.apache.hadoop.mapreduce.ID;
 import org.apache.hadoop.tools.rumen.datatypes.DataType;
 import org.apache.hadoop.tools.rumen.serializers.DefaultRumenSerializer;
 import org.apache.hadoop.tools.rumen.serializers.ObjectStringSerializer;
-import org.codehaus.jackson.JsonEncoding;
-import org.codehaus.jackson.JsonGenerator;
-import org.codehaus.jackson.Version;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.SerializationConfig;
-import org.codehaus.jackson.map.module.SimpleModule;
 
 /**
  * Simple wrapper around {@link JsonGenerator} to write objects in JSON format.
@@ -41,12 +40,10 @@ public class JsonObjectMapperWriter<T> implements Closeable {
   
   public JsonObjectMapperWriter(OutputStream output, boolean prettyPrint) throws IOException {
     ObjectMapper mapper = new ObjectMapper();
-    mapper.configure(
-        SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true);
 
     // define a module
-    SimpleModule module = new SimpleModule("Default Serializer",  
-                                           new Version(0, 1, 1, "FINAL"));
+    SimpleModule module = new SimpleModule(
+        "Default Serializer", new Version(0, 1, 1, "FINAL", "", ""));
     // add various serializers to the module
     //   add default (all-pass) serializer for all rumen specific data types
     module.addSerializer(DataType.class, new DefaultRumenSerializer());
@@ -56,9 +53,7 @@ public class JsonObjectMapperWriter<T> implements Closeable {
     // register the module with the object-mapper
     mapper.registerModule(module);
 
-    mapper.getJsonFactory();
-    writer = mapper.getJsonFactory().createJsonGenerator(
-        output, JsonEncoding.UTF8);
+    writer = mapper.getFactory().createGenerator(output, JsonEncoding.UTF8);
     if (prettyPrint) {
       writer.useDefaultPrettyPrinter();
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedJob.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedJob.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedJob.java
index 785feb3..597aab8 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedJob.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedJob.java
@@ -27,9 +27,9 @@ import java.util.Properties;
 import java.util.Set;
 import java.util.TreeSet;
 
+import com.fasterxml.jackson.annotation.JsonAnySetter;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.tools.rumen.datatypes.*;
-import org.codehaus.jackson.annotate.JsonAnySetter;
 
 /**
  * A {@link LoggedDiscreteCDF} is a representation of an hadoop job, with the

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedLocation.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedLocation.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedLocation.java
index 047cd63..6d51e4a 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedLocation.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedLocation.java
@@ -25,8 +25,8 @@ import java.util.Map;
 import java.util.Set;
 import java.util.TreeSet;
 
+import com.fasterxml.jackson.annotation.JsonAnySetter;
 import org.apache.hadoop.tools.rumen.datatypes.NodeName;
-import org.codehaus.jackson.annotate.JsonAnySetter;
 
 /**
  * A {@link LoggedLocation} is a representation of a point in an hierarchical

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedNetworkTopology.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedNetworkTopology.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedNetworkTopology.java
index 23bbb98..5d79a83 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedNetworkTopology.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedNetworkTopology.java
@@ -29,8 +29,8 @@ import java.util.TreeSet;
 import java.util.ArrayList;
 import java.util.Comparator;
 
+import com.fasterxml.jackson.annotation.JsonAnySetter;
 import org.apache.hadoop.tools.rumen.datatypes.NodeName;
-import org.codehaus.jackson.annotate.JsonAnySetter;
 
 /**
  * A {@link LoggedNetworkTopology} represents a tree that in turn represents a

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedSingleRelativeRanking.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedSingleRelativeRanking.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedSingleRelativeRanking.java
index e507116..d9be325 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedSingleRelativeRanking.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedSingleRelativeRanking.java
@@ -17,11 +17,11 @@
  */
 package org.apache.hadoop.tools.rumen;
 
+import com.fasterxml.jackson.annotation.JsonAnySetter;
+
 import java.util.Set;
 import java.util.TreeSet;
 
-import org.codehaus.jackson.annotate.JsonAnySetter;
-
 /**
  * A {@link LoggedSingleRelativeRanking} represents an X-Y coordinate of a
  * single point in a discrete CDF.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
index 4a23fa6..4ae33a7 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
@@ -23,13 +23,13 @@ import java.util.List;
 import java.util.Set;
 import java.util.TreeSet;
 
+import com.fasterxml.jackson.annotation.JsonAnySetter;
 import org.apache.hadoop.mapreduce.TaskID;
 import org.apache.hadoop.mapreduce.jobhistory.JhCounter;
 import org.apache.hadoop.mapreduce.jobhistory.JhCounterGroup;
 import org.apache.hadoop.mapreduce.jobhistory.JhCounters;
 
 import org.apache.hadoop.util.StringUtils;
-import org.codehaus.jackson.annotate.JsonAnySetter;
 
 /**
  * A {@link LoggedTask} represents a [hadoop] task that is part of a hadoop job.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
index c21eb39..5c6abd3 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
@@ -23,8 +23,8 @@ import java.util.List;
 import java.util.Set;
 import java.util.TreeSet;
 
+import com.fasterxml.jackson.annotation.JsonAnySetter;
 import org.apache.hadoop.util.StringUtils;
-import org.codehaus.jackson.annotate.JsonAnySetter;
 
 // HACK ALERT!!!  This "should" have have two subclasses, which might be called
 //                LoggedMapTaskAttempt and LoggedReduceTaskAttempt, but 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/NodeName.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/NodeName.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/NodeName.java
index c0b8d45..20eb535 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/NodeName.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/NodeName.java
@@ -17,12 +17,12 @@
  */
 package org.apache.hadoop.tools.rumen.datatypes;
 
+import com.fasterxml.jackson.annotation.JsonIgnore;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.tools.rumen.ParsedHost;
 import org.apache.hadoop.tools.rumen.anonymization.WordList;
 import org.apache.hadoop.tools.rumen.state.State;
 import org.apache.hadoop.tools.rumen.state.StatePool;
-import org.codehaus.jackson.annotate.JsonIgnore;
 
 /**
  * Represents the cluster host.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/serializers/BlockingSerializer.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/serializers/BlockingSerializer.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/serializers/BlockingSerializer.java
index 4338602..a720214 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/serializers/BlockingSerializer.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/serializers/BlockingSerializer.java
@@ -17,12 +17,12 @@
  */
 package org.apache.hadoop.tools.rumen.serializers;
 
-import java.io.IOException;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonSerializer;
+import com.fasterxml.jackson.databind.SerializerProvider;
 
-import org.codehaus.jackson.JsonGenerator;
-import org.codehaus.jackson.JsonProcessingException;
-import org.codehaus.jackson.map.JsonSerializer;
-import org.codehaus.jackson.map.SerializerProvider;
+import java.io.IOException;
 
 /**
  * A JSON serializer for Strings.
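
The Rumen serializers now extend Jackson 2's com.fasterxml.jackson.databind.JsonSerializer, whose serialize() method takes the JsonGenerator and SerializerProvider from the new packages but is otherwise shaped the same. A minimal sketch of a String serializer under the Jackson 2 types (the replacement text written here is invented; it is not what BlockingSerializer actually emits):

    import com.fasterxml.jackson.core.JsonGenerator;
    import com.fasterxml.jackson.databind.JsonSerializer;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.SerializerProvider;
    import com.fasterxml.jackson.databind.module.SimpleModule;

    import java.io.IOException;

    public class RedactingSerializerSketch extends JsonSerializer<String> {
      @Override
      public void serialize(String value, JsonGenerator gen, SerializerProvider provider)
          throws IOException {
        gen.writeString("<redacted>");   // illustrative behaviour only
      }

      public static void main(String[] args) throws IOException {
        SimpleModule module = new SimpleModule();
        module.addSerializer(String.class, new RedactingSerializerSketch());
        ObjectMapper mapper = new ObjectMapper().registerModule(module);
        System.out.println(mapper.writeValueAsString(new String[] {"secret"}));
      }
    }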

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/serializers/DefaultAnonymizingRumenSerializer.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/serializers/DefaultAnonymizingRumenSerializer.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/serializers/DefaultAnonymizingRumenSerializer.java
index d4e6fd5..944d193 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/serializers/DefaultAnonymizingRumenSerializer.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/serializers/DefaultAnonymizingRumenSerializer.java
@@ -19,13 +19,13 @@ package org.apache.hadoop.tools.rumen.serializers;
 
 import java.io.IOException;
 
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonSerializer;
+import com.fasterxml.jackson.databind.SerializerProvider;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.tools.rumen.datatypes.AnonymizableDataType;
 import org.apache.hadoop.tools.rumen.state.StatePool;
-import org.codehaus.jackson.JsonGenerator;
-import org.codehaus.jackson.JsonProcessingException;
-import org.codehaus.jackson.map.JsonSerializer;
-import org.codehaus.jackson.map.SerializerProvider;
 
 /**
  * Default Rumen JSON serializer.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/serializers/DefaultRumenSerializer.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/serializers/DefaultRumenSerializer.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/serializers/DefaultRumenSerializer.java
index 1b433d8..766a750 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/serializers/DefaultRumenSerializer.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/serializers/DefaultRumenSerializer.java
@@ -19,11 +19,12 @@ package org.apache.hadoop.tools.rumen.serializers;
 
 import java.io.IOException;
 
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonSerializer;
+import com.fasterxml.jackson.databind.SerializerProvider;
+
 import org.apache.hadoop.tools.rumen.datatypes.DataType;
-import org.codehaus.jackson.JsonGenerator;
-import org.codehaus.jackson.JsonProcessingException;
-import org.codehaus.jackson.map.JsonSerializer;
-import org.codehaus.jackson.map.SerializerProvider;
 
 /**
  * Default Rumen JSON serializer.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/serializers/ObjectStringSerializer.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/serializers/ObjectStringSerializer.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/serializers/ObjectStringSerializer.java
index 69e8950..a576871 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/serializers/ObjectStringSerializer.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/serializers/ObjectStringSerializer.java
@@ -17,12 +17,12 @@
  */
 package org.apache.hadoop.tools.rumen.serializers;
 
-import java.io.IOException;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonSerializer;
+import com.fasterxml.jackson.databind.SerializerProvider;
 
-import org.codehaus.jackson.JsonGenerator;
-import org.codehaus.jackson.JsonProcessingException;
-import org.codehaus.jackson.map.JsonSerializer;
-import org.codehaus.jackson.map.SerializerProvider;
+import java.io.IOException;
 
 /**
  * Rumen JSON serializer for serializing object using toSring() API.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/State.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/State.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/State.java
index 94a78c2..6ff43d5 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/State.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/State.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.tools.rumen.state;
 
-import org.codehaus.jackson.annotate.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonIgnore;
 
 /**
  * Represents a state. This state is managed by {@link StatePool}.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StateDeserializer.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StateDeserializer.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StateDeserializer.java
index 47ceb8e..4fd2f12 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StateDeserializer.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StateDeserializer.java
@@ -19,13 +19,13 @@ package org.apache.hadoop.tools.rumen.state;
 
 import java.io.IOException;
 
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.DeserializationContext;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
+import com.fasterxml.jackson.databind.node.ObjectNode;
 import org.apache.hadoop.tools.rumen.state.StatePool.StatePair;
-import org.codehaus.jackson.JsonParser;
-import org.codehaus.jackson.JsonProcessingException;
-import org.codehaus.jackson.map.DeserializationContext;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.deser.std.StdDeserializer;
-import org.codehaus.jackson.node.ObjectNode;
 
 /**
  * Rumen JSON deserializer for deserializing the {@link State} object.
@@ -46,7 +46,7 @@ public class StateDeserializer extends StdDeserializer<StatePair> {
     
     try {
       stateClass = 
-        Class.forName(statePairObject.get("className").getTextValue().trim());
+        Class.forName(statePairObject.get("className").textValue().trim());
     } catch (ClassNotFoundException cnfe) {
       throw new RuntimeException("Invalid classname!", cnfe);
     }
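
StdDeserializer moves to com.fasterxml.jackson.databind.deser.std and ObjectNode to com.fasterxml.jackson.databind.node, and the className lookup switches to textValue() as well. A rough sketch of the same kind of custom deserializer under the Jackson 2 types (the Wrapper/"state" layout is hypothetical, not Rumen's actual state format):

    import com.fasterxml.jackson.core.JsonParser;
    import com.fasterxml.jackson.databind.DeserializationContext;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
    import com.fasterxml.jackson.databind.module.SimpleModule;
    import com.fasterxml.jackson.databind.node.ObjectNode;

    import java.io.IOException;

    public class StateDeserializerSketch {
      // Hypothetical wrapper: {"className": "...", "state": {...}}
      public static class Wrapper {
        public Object state;
      }

      public static class WrapperDeserializer extends StdDeserializer<Wrapper> {
        public WrapperDeserializer() {
          super(Wrapper.class);
        }

        @Override
        public Wrapper deserialize(JsonParser parser, DeserializationContext context)
            throws IOException {
          ObjectMapper mapper = (ObjectMapper) parser.getCodec();
          ObjectNode node = mapper.readTree(parser);
          Class<?> stateClass;
          try {
            // textValue() replaces Jackson 1's getTextValue()
            stateClass = Class.forName(node.get("className").textValue().trim());
          } catch (ClassNotFoundException cnfe) {
            throw new RuntimeException("Invalid classname!", cnfe);
          }
          Wrapper wrapper = new Wrapper();
          wrapper.state = mapper.readValue(node.get("state").traverse(), stateClass);
          return wrapper;
        }
      }

      public static void main(String[] args) throws IOException {
        SimpleModule module = new SimpleModule();
        module.addDeserializer(Wrapper.class, new WrapperDeserializer());
        ObjectMapper mapper = new ObjectMapper().registerModule(module);
        Wrapper w = mapper.readValue(
            "{\"className\":\"java.util.HashMap\",\"state\":{\"k\":\"v\"}}", Wrapper.class);
        System.out.println(w.state);
      }
    }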

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StatePool.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StatePool.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StatePool.java
index 576a3c0..6e6c859 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StatePool.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StatePool.java
@@ -27,6 +27,14 @@ import java.text.SimpleDateFormat;
 import java.util.Calendar;
 import java.util.HashMap;
 
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.core.JsonEncoding;
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.Version;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.module.SimpleModule;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -35,16 +43,6 @@ import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.tools.rumen.Anonymizer;
 import org.apache.hadoop.tools.rumen.datatypes.DataType;
-import org.codehaus.jackson.JsonEncoding;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.JsonGenerator;
-import org.codehaus.jackson.JsonParser;
-import org.codehaus.jackson.Version;
-import org.codehaus.jackson.annotate.JsonIgnore;
-import org.codehaus.jackson.map.DeserializationConfig;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.SerializationConfig;
-import org.codehaus.jackson.map.module.SimpleModule;
 
 /**
  * A pool of states. States used by {@link DataType}'s can be managed the 
@@ -212,20 +210,16 @@ public class StatePool {
   
   private void read(DataInput in) throws IOException {
     ObjectMapper mapper = new ObjectMapper();
-    mapper.configure(
-        DeserializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true);
-    
     // define a module
     SimpleModule module = new SimpleModule("State Serializer",  
-        new Version(0, 1, 1, "FINAL"));
+        new Version(0, 1, 1, "FINAL", "", ""));
     // add the state deserializer
     module.addDeserializer(StatePair.class, new StateDeserializer());
 
     // register the module with the object-mapper
     mapper.registerModule(module);
 
-    JsonParser parser = 
-      mapper.getJsonFactory().createJsonParser((DataInputStream)in);
+    JsonParser parser = mapper.getFactory().createParser((DataInputStream)in);
     StatePool statePool = mapper.readValue(parser, StatePool.class);
     this.setStates(statePool.getStates());
     parser.close();
@@ -283,20 +277,18 @@ public class StatePool {
     // This is just a JSON experiment
     System.out.println("Dumping the StatePool's in JSON format.");
     ObjectMapper outMapper = new ObjectMapper();
-    outMapper.configure(
-        SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true);
     // define a module
     SimpleModule module = new SimpleModule("State Serializer",  
-        new Version(0, 1, 1, "FINAL"));
+        new Version(0, 1, 1, "FINAL", "", ""));
     // add the state serializer
     //module.addSerializer(State.class, new StateSerializer());
 
     // register the module with the object-mapper
     outMapper.registerModule(module);
 
-    JsonFactory outFactory = outMapper.getJsonFactory();
-    JsonGenerator jGen = 
-      outFactory.createJsonGenerator((DataOutputStream)out, JsonEncoding.UTF8);
+    JsonFactory outFactory = outMapper.getFactory();
+    JsonGenerator jGen =
+        outFactory.createGenerator((DataOutputStream)out, JsonEncoding.UTF8);
     jGen.useDefaultPrettyPrinter();
 
     jGen.writeObject(this);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-rumen/src/test/java/org/apache/hadoop/tools/rumen/TestHistograms.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/test/java/org/apache/hadoop/tools/rumen/TestHistograms.java b/hadoop-tools/hadoop-rumen/src/test/java/org/apache/hadoop/tools/rumen/TestHistograms.java
index 372d93e..206095a 100644
--- a/hadoop-tools/hadoop-rumen/src/test/java/org/apache/hadoop/tools/rumen/TestHistograms.java
+++ b/hadoop-tools/hadoop-rumen/src/test/java/org/apache/hadoop/tools/rumen/TestHistograms.java
@@ -21,16 +21,17 @@ import java.io.IOException;
 
 import java.util.List;
 
+import com.fasterxml.jackson.core.JsonEncoding;
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.codehaus.jackson.JsonEncoding;
-import org.codehaus.jackson.JsonGenerator;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.map.ObjectMapper;
 
 import org.junit.Ignore;
 import org.junit.Test;
@@ -139,9 +140,9 @@ public class TestHistograms {
         Path goldFilePath = new Path(filePath.getParent(), "gold"+testName);
 
         ObjectMapper mapper = new ObjectMapper();
-        JsonFactory factory = mapper.getJsonFactory();
+        JsonFactory factory = mapper.getFactory();
         FSDataOutputStream ostream = lfs.create(goldFilePath, true);
-        JsonGenerator gen = factory.createJsonGenerator(ostream,
+        JsonGenerator gen = factory.createGenerator(ostream,
             JsonEncoding.UTF8);
         gen.useDefaultPrettyPrinter();
         

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-sls/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-sls/pom.xml b/hadoop-tools/hadoop-sls/pom.xml
index e361ba3..da70b24 100644
--- a/hadoop-tools/hadoop-sls/pom.xml
+++ b/hadoop-tools/hadoop-sls/pom.xml
@@ -70,6 +70,10 @@
       <artifactId>jetty-util</artifactId>
       <scope>provided</scope>
     </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-databind</artifactId>
+    </dependency>
   </dependencies>
 
   <build>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/RumenToSLSConverter.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/RumenToSLSConverter.java b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/RumenToSLSConverter.java
index 0d0745c..76bcb15 100644
--- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/RumenToSLSConverter.java
+++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/RumenToSLSConverter.java
@@ -34,6 +34,9 @@ import java.util.Set;
 import java.util.TreeMap;
 import java.util.TreeSet;
 
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.GnuParser;
@@ -41,9 +44,6 @@ import org.apache.commons.cli.Options;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.yarn.sls.utils.SLSUtils;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.ObjectWriter;
 
 @Private
 @Unstable
@@ -127,7 +127,7 @@ public class RumenToSLSConverter {
         ObjectMapper mapper = new ObjectMapper();
         ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
         Iterator<Map> i = mapper.readValues(
-                new JsonFactory().createJsonParser(input), Map.class);
+            new JsonFactory().createParser(input), Map.class);
         while (i.hasNext()) {
           Map m = i.next();
           output.write(writer.writeValueAsString(createSLSJob(m)) + EOL);
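
createParser() replaces createJsonParser() here too; combined with ObjectMapper.readValues(), it gives a streaming MappingIterator over consecutive root-level JSON objects. A small sketch of that loop with an inline reader instead of the Rumen trace file:

    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.ObjectWriter;

    import java.io.IOException;
    import java.io.StringReader;
    import java.util.Iterator;
    import java.util.Map;

    public class ReadValuesSketch {
      public static void main(String[] args) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
        StringReader input = new StringReader("{\"job\":\"j1\"} {\"job\":\"j2\"}");
        // readValues() returns a MappingIterator; the parser itself stays streaming.
        Iterator<Map> i = mapper.readValues(
            new JsonFactory().createParser(input), Map.class);
        while (i.hasNext()) {
          System.out.println(writer.writeValueAsString(i.next()));
        }
      }
    }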

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/SLSRunner.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/SLSRunner.java b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/SLSRunner.java
index c9c5c38..61738fb 100644
--- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/SLSRunner.java
+++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/SLSRunner.java
@@ -33,6 +33,9 @@ import java.util.Map;
 import java.util.Random;
 import java.util.Set;
 
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.GnuParser;
@@ -66,8 +69,6 @@ import org.apache.hadoop.yarn.sls.scheduler.TaskRunner;
 import org.apache.hadoop.yarn.sls.utils.SLSUtils;
 import org.apache.hadoop.yarn.util.resource.Resources;
 import org.apache.log4j.Logger;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.map.ObjectMapper;
 
 @Private
 @Unstable
@@ -281,7 +282,7 @@ public class SLSRunner {
       Reader input =
           new InputStreamReader(new FileInputStream(inputTrace), "UTF-8");
       try {
-        Iterator<Map> i = mapper.readValues(jsonF.createJsonParser(input),
+        Iterator<Map> i = mapper.readValues(jsonF.createParser(input),
                 Map.class);
         while (i.hasNext()) {
           Map jsonJob = i.next();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2cc841f1/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/utils/SLSUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/utils/SLSUtils.java b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/utils/SLSUtils.java
index f1b4f07..e5f7cd0 100644
--- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/utils/SLSUtils.java
+++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/utils/SLSUtils.java
@@ -28,6 +28,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.conf.Configuration;
@@ -37,8 +39,6 @@ import org.apache.hadoop.tools.rumen.JobTraceReader;
 import org.apache.hadoop.tools.rumen.LoggedJob;
 import org.apache.hadoop.tools.rumen.LoggedTask;
 import org.apache.hadoop.tools.rumen.LoggedTaskAttempt;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.map.ObjectMapper;
 
 @Private
 @Unstable
@@ -106,8 +106,7 @@ public class SLSUtils {
     Reader input =
         new InputStreamReader(new FileInputStream(jobTrace), "UTF-8");
     try {
-      Iterator<Map> i = mapper.readValues(
-              jsonF.createJsonParser(input), Map.class);
+      Iterator<Map> i = mapper.readValues(jsonF.createParser(input), Map.class);
       while (i.hasNext()) {
         Map jsonE = i.next();
         List tasks = (List) jsonE.get("job.tasks");
@@ -134,8 +133,7 @@ public class SLSUtils {
     Reader input =
         new InputStreamReader(new FileInputStream(nodeFile), "UTF-8");
     try {
-      Iterator<Map> i = mapper.readValues(
-              jsonF.createJsonParser(input), Map.class);
+      Iterator<Map> i = mapper.readValues(jsonF.createParser(input), Map.class);
       while (i.hasNext()) {
         Map jsonE = i.next();
         String rack = "/" + jsonE.get("rack");


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org