You are viewing a plain text version of this content; the hyperlink to the canonical (HTML) version was lost in plain-text conversion and can be found in the original mailing-list archive.
Posted to commits@avro.apache.org by cu...@apache.org on 2009/05/22 21:29:09 UTC

svn commit: r777671 - in /hadoop/avro/trunk: ./ lib/ src/java/org/apache/avro/ src/java/org/apache/avro/generic/ src/java/org/apache/avro/specific/

Author: cutting
Date: Fri May 22 19:29:08 2009
New Revision: 777671

URL: http://svn.apache.org/viewvc?rev=777671&view=rev
Log:
AVRO-34.  Upgrade to Jackson version 1.0.0.

Added:
    hadoop/avro/trunk/lib/jackson-core-asl-1.0.0.jar   (with props)
    hadoop/avro/trunk/lib/jackson-mapper-asl-1.0.0.jar   (with props)
Removed:
    hadoop/avro/trunk/lib/jackson-asl-0.9.4.jar
Modified:
    hadoop/avro/trunk/CHANGES.txt
    hadoop/avro/trunk/src/java/org/apache/avro/Protocol.java
    hadoop/avro/trunk/src/java/org/apache/avro/Schema.java
    hadoop/avro/trunk/src/java/org/apache/avro/generic/GenericDatumReader.java
    hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificCompiler.java

Modified: hadoop/avro/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/CHANGES.txt?rev=777671&r1=777670&r2=777671&view=diff
==============================================================================
--- hadoop/avro/trunk/CHANGES.txt (original)
+++ hadoop/avro/trunk/CHANGES.txt Fri May 22 19:29:08 2009
@@ -41,6 +41,8 @@
     AVRO-26. Switch tests from JUnit to TestNG.  (Konstantin Boudnik
     via cutting)
 
+    AVRO-34. Upgrade to Jackson version 1.0.0.  (cutting)
+
   OPTIMIZATIONS
 
   BUG FIXES

Added: hadoop/avro/trunk/lib/jackson-core-asl-1.0.0.jar
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/lib/jackson-core-asl-1.0.0.jar?rev=777671&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/avro/trunk/lib/jackson-core-asl-1.0.0.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/avro/trunk/lib/jackson-mapper-asl-1.0.0.jar
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/lib/jackson-mapper-asl-1.0.0.jar?rev=777671&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/avro/trunk/lib/jackson-mapper-asl-1.0.0.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Modified: hadoop/avro/trunk/src/java/org/apache/avro/Protocol.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/Protocol.java?rev=777671&r1=777670&r2=777671&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/Protocol.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/Protocol.java Fri May 22 19:29:08 2009
@@ -30,7 +30,7 @@
 
 import org.codehaus.jackson.JsonParseException;
 import org.codehaus.jackson.JsonParser;
-import org.codehaus.jackson.map.JsonNode;
+import org.codehaus.jackson.JsonNode;
 
 import org.apache.avro.Schema.Field;
 
@@ -216,7 +216,7 @@
   private static Protocol parse(JsonParser parser) {
     try {
       Protocol protocol = new Protocol();
-      protocol.parse(Schema.MAPPER.read(parser));
+      protocol.parse(Schema.MAPPER.readTree(parser));
       return protocol;
     } catch (IOException e) {
       throw new SchemaParseException(e);
@@ -231,21 +231,21 @@
   }
 
   private void parseNamespace(JsonNode json) {
-    JsonNode nameNode = json.getFieldValue("namespace");
+    JsonNode nameNode = json.get("namespace");
     if (nameNode == null) return;                 // no namespace defined
     this.namespace = nameNode.getTextValue();
     types.space(this.namespace);
   }
 
   private void parseName(JsonNode json) {
-    JsonNode nameNode = json.getFieldValue("protocol");
+    JsonNode nameNode = json.get("protocol");
     if (nameNode == null)
       throw new SchemaParseException("No protocol name specified: "+json);
     this.name = nameNode.getTextValue();
   }
 
   private void parseTypes(JsonNode json) {
-    JsonNode defs = json.getFieldValue("types");
+    JsonNode defs = json.get("types");
     if (defs == null) return;                    // no types defined
     if (!defs.isArray())
       throw new SchemaParseException("Types not an array: "+defs);
@@ -257,40 +257,40 @@
   }
 
   private void parseMessages(JsonNode json) {
-    JsonNode defs = json.getFieldValue("messages");
+    JsonNode defs = json.get("messages");
     if (defs == null) return;                    // no messages defined
     for (Iterator<String> i = defs.getFieldNames(); i.hasNext();) {
       String prop = i.next();
-      this.messages.put(prop, parseMessage(prop, defs.getFieldValue(prop)));
+      this.messages.put(prop, parseMessage(prop, defs.get(prop)));
     }
   }
 
   private Message parseMessage(String messageName, JsonNode json) {
-    JsonNode requestNode = json.getFieldValue("request");
+    JsonNode requestNode = json.get("request");
     if (requestNode == null || !requestNode.isArray())
       throw new SchemaParseException("No request specified: "+json);
     LinkedHashMap<String,Field> fields = new LinkedHashMap<String,Field>();
     for (JsonNode field : requestNode) {
-      JsonNode fieldNameNode = field.getFieldValue("name");
+      JsonNode fieldNameNode = field.get("name");
       if (fieldNameNode == null)
         throw new SchemaParseException("No param name: "+field);
-      JsonNode fieldTypeNode = field.getFieldValue("type");
+      JsonNode fieldTypeNode = field.get("type");
       if (fieldTypeNode == null)
         throw new SchemaParseException("No param type: "+field);
       fields.put(fieldNameNode.getTextValue(),
                  new Field(Schema.parse(fieldTypeNode,types),
-                           field.getFieldValue("default")));
+                           field.get("default")));
     }
     Schema request = Schema.createRecord(fields);
     
-    JsonNode responseNode = json.getFieldValue("response");
+    JsonNode responseNode = json.get("response");
     if (responseNode == null)
       throw new SchemaParseException("No response specified: "+json);
     Schema response = Schema.parse(responseNode, types);
 
     List<Schema> errs = new ArrayList<Schema>();
     errs.add(SYSTEM_ERROR);                       // every method can throw
-    JsonNode decls = json.getFieldValue("errors");
+    JsonNode decls = json.get("errors");
     if (decls != null) {
       if (!decls.isArray())
         throw new SchemaParseException("Errors not an array: "+json);

Modified: hadoop/avro/trunk/src/java/org/apache/avro/Schema.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/Schema.java?rev=777671&r1=777670&r2=777671&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/Schema.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/Schema.java Fri May 22 19:29:08 2009
@@ -32,8 +32,8 @@
 import org.codehaus.jackson.JsonFactory;
 import org.codehaus.jackson.JsonParseException;
 import org.codehaus.jackson.JsonParser;
-import org.codehaus.jackson.map.JsonNode;
-import org.codehaus.jackson.map.JsonTypeMapper;
+import org.codehaus.jackson.JsonNode;
+import org.codehaus.jackson.map.ObjectMapper;
 
 /** An abstract data type.
  * <p>A schema may be one of:
@@ -55,7 +55,7 @@
  * </ul>
  */
 public abstract class Schema {
-  static final JsonTypeMapper MAPPER = new JsonTypeMapper();
+  static final ObjectMapper MAPPER = new ObjectMapper();
   static final JsonFactory FACTORY = new JsonFactory();
 
   static {
@@ -512,7 +512,7 @@
   public static Schema parse(File file) throws IOException {
     JsonParser parser = FACTORY.createJsonParser(file);
     try {
-      return Schema.parse(MAPPER.read(parser), new Names());
+      return Schema.parse(MAPPER.readTree(parser), new Names());
     } catch (JsonParseException e) {
       throw new SchemaParseException(e);
     }
@@ -574,16 +574,16 @@
         throw new SchemaParseException("Undefined name: "+schema);
       return result;
     } else if (schema.isObject()) {
-      JsonNode typeNode = schema.getFieldValue("type");
+      JsonNode typeNode = schema.get("type");
       if (typeNode == null)
         throw new SchemaParseException("No type: "+schema);
       String type = typeNode.getTextValue();
       String name = null, space = null;
       if (type.equals("record") || type.equals("error")
           || type.equals("enum") || type.equals("fixed")) {
-        JsonNode nameNode = schema.getFieldValue("name");
+        JsonNode nameNode = schema.get("name");
         name = nameNode != null ? nameNode.getTextValue() : null;
-        JsonNode spaceNode = schema.getFieldValue("namespace");
+        JsonNode spaceNode = schema.get("namespace");
         space = spaceNode!=null?spaceNode.getTextValue():names.space();
       }
       if (type.equals("record") || type.equals("error")) { // record
@@ -591,24 +591,24 @@
         RecordSchema result =
           new RecordSchema(name, space, type.equals("error"));
         if (name != null) names.put(name, result);
-        JsonNode fieldsNode = schema.getFieldValue("fields");
+        JsonNode fieldsNode = schema.get("fields");
         if (fieldsNode == null || !fieldsNode.isArray())
           throw new SchemaParseException("Record has no fields: "+schema);
         for (JsonNode field : fieldsNode) {
-          JsonNode fieldNameNode = field.getFieldValue("name");
+          JsonNode fieldNameNode = field.get("name");
           if (fieldNameNode == null)
             throw new SchemaParseException("No field name: "+field);
-          JsonNode fieldTypeNode = field.getFieldValue("type");
+          JsonNode fieldTypeNode = field.get("type");
           if (fieldTypeNode == null)
             throw new SchemaParseException("No field type: "+field);
           Schema fieldSchema = parse(fieldTypeNode, names);
           fields.put(fieldNameNode.getTextValue(),
-                     new Field(fieldSchema, field.getFieldValue("default")));
+                     new Field(fieldSchema, field.get("default")));
         }
         result.setFields(fields);
         return result;
       } else if (type.equals("enum")) {           // enum
-        JsonNode symbolsNode = schema.getFieldValue("symbols");
+        JsonNode symbolsNode = schema.get("symbols");
         if (symbolsNode == null || !symbolsNode.isArray())
           throw new SchemaParseException("Enum has no symbols: "+schema);
         List<String> symbols = new ArrayList<String>();
@@ -618,12 +618,12 @@
         if (name != null) names.put(name, result);
         return result;
       } else if (type.equals("array")) {          // array
-        return new ArraySchema(parse(schema.getFieldValue("items"), names));
+        return new ArraySchema(parse(schema.get("items"), names));
       } else if (type.equals("map")) {            // map
-        return new MapSchema(parse(schema.getFieldValue("values"), names));
+        return new MapSchema(parse(schema.get("values"), names));
       } else if (type.equals("fixed")) {          // fixed
-        Schema result = new FixedSchema(name, space, schema
-                                        .getFieldValue("size").getIntValue());
+        Schema result = new FixedSchema(name, space,
+                                        schema.get("size").getIntValue());
         if (name != null) names.put(name, result);
         return result;
       } else
@@ -640,7 +640,7 @@
 
   static JsonNode parseJson(String s) {
     try {
-      return MAPPER.read(FACTORY.createJsonParser(new StringReader(s)));
+      return MAPPER.readTree(FACTORY.createJsonParser(new StringReader(s)));
     } catch (JsonParseException e) {
       throw new RuntimeException(e);
     } catch (IOException e) {

Modified: hadoop/avro/trunk/src/java/org/apache/avro/generic/GenericDatumReader.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/generic/GenericDatumReader.java?rev=777671&r1=777670&r2=777671&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/generic/GenericDatumReader.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/generic/GenericDatumReader.java Fri May 22 19:29:08 2009
@@ -24,7 +24,7 @@
 import java.util.Set;
 import java.nio.ByteBuffer;
 
-import org.codehaus.jackson.map.JsonNode;
+import org.codehaus.jackson.JsonNode;
 
 import org.apache.avro.AvroRuntimeException;
 import org.apache.avro.AvroTypeException;
@@ -199,7 +199,7 @@
       for (Map.Entry<String, Field> entry : schema.getFields().entrySet()) {
         String name = entry.getKey();
         Field f = entry.getValue();
-        JsonNode v = json.getFieldValue(name);
+        JsonNode v = json.get(name);
         if (v == null) v = f.defaultValue();
         if (v != null) {
           Object o = old != null ? getField(old, name, f.pos()) : null;

Modified: hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificCompiler.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificCompiler.java?rev=777671&r1=777670&r2=777671&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificCompiler.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificCompiler.java Fri May 22 19:29:08 2009
@@ -25,11 +25,11 @@
 import org.apache.avro.Schema;
 import org.apache.avro.Protocol.Message;
 import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.map.JsonTypeMapper;
+import org.codehaus.jackson.map.ObjectMapper;
 
 /** Generate specific Java interfaces and classes for protocols and schemas. */
 public class SpecificCompiler {
-  private static final JsonTypeMapper MAPPER = new JsonTypeMapper();
+  private static final ObjectMapper MAPPER = new ObjectMapper();
   private static final JsonFactory FACTORY = new JsonFactory();
 
   private String namespace;