Posted to common-commits@hadoop.apache.org by om...@apache.org on 2010/12/04 08:13:12 UTC
svn commit: r1042107 [4/6] - in /hadoop/common/branches/HADOOP-6685: ./ ivy/
src/java/ src/java/org/apache/hadoop/fs/ src/java/org/apache/hadoop/io/
src/java/org/apache/hadoop/io/file/tfile/
src/java/org/apache/hadoop/io/serial/ src/java/org/apache/had...
Added: hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/SerializationMetadata.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/SerializationMetadata.java?rev=1042107&view=auto
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/SerializationMetadata.java (added)
+++ hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/SerializationMetadata.java Sat Dec 4 07:13:10 2010
@@ -0,0 +1,763 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: src/protobuf/SerializationMetadata.proto
+
+package org.apache.hadoop.io.serial.lib;
+
+public final class SerializationMetadata {
+ private SerializationMetadata() {}
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ public static final class TypedSerializationMetadata extends
+ com.google.protobuf.GeneratedMessage {
+ // Use TypedSerializationMetadata.newBuilder() to construct.
+ private TypedSerializationMetadata() {
+ initFields();
+ }
+ private TypedSerializationMetadata(boolean noInit) {}
+
+ private static final TypedSerializationMetadata defaultInstance;
+ public static TypedSerializationMetadata getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public TypedSerializationMetadata getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.io.serial.lib.SerializationMetadata.internal_static_org_apache_hadoop_io_serial_lib_TypedSerializationMetadata_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.io.serial.lib.SerializationMetadata.internal_static_org_apache_hadoop_io_serial_lib_TypedSerializationMetadata_fieldAccessorTable;
+ }
+
+ // optional string typename = 1;
+ public static final int TYPENAME_FIELD_NUMBER = 1;
+ private boolean hasTypename;
+ private java.lang.String typename_ = "";
+ public boolean hasTypename() { return hasTypename; }
+ public java.lang.String getTypename() { return typename_; }
+
+ private void initFields() {
+ }
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (hasTypename()) {
+ output.writeString(1, getTypename());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (hasTypename()) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeStringSize(1, getTypename());
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ public static org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder> {
+ private org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata result;
+
+ // Construct using org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata.newBuilder()
+ private Builder() {}
+
+ private static Builder create() {
+ Builder builder = new Builder();
+ builder.result = new org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata();
+ return builder;
+ }
+
+ protected org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata internalGetResult() {
+ return result;
+ }
+
+ public Builder clear() {
+ if (result == null) {
+ throw new IllegalStateException(
+ "Cannot call clear() after build().");
+ }
+ result = new org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata();
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(result);
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata.getDescriptor();
+ }
+
+ public org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata getDefaultInstanceForType() {
+ return org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata.getDefaultInstance();
+ }
+
+ public boolean isInitialized() {
+ return result.isInitialized();
+ }
+ public org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata build() {
+ if (result != null && !isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return buildPartial();
+ }
+
+ private org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ if (!isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return buildPartial();
+ }
+
+ public org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata buildPartial() {
+ if (result == null) {
+ throw new IllegalStateException(
+ "build() has already been called on this Builder.");
+ }
+ org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata returnMe = result;
+ result = null;
+ return returnMe;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata) {
+ return mergeFrom((org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata other) {
+ if (other == org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata.getDefaultInstance()) return this;
+ if (other.hasTypename()) {
+ setTypename(other.getTypename());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ setTypename(input.readString());
+ break;
+ }
+ }
+ }
+ }
+
+
+ // optional string typename = 1;
+ public boolean hasTypename() {
+ return result.hasTypename();
+ }
+ public java.lang.String getTypename() {
+ return result.getTypename();
+ }
+ public Builder setTypename(java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ result.hasTypename = true;
+ result.typename_ = value;
+ return this;
+ }
+ public Builder clearTypename() {
+ result.hasTypename = false;
+ result.typename_ = getDefaultInstance().getTypename();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:org.apache.hadoop.io.serial.lib.TypedSerializationMetadata)
+ }
+
+ static {
+ defaultInstance = new TypedSerializationMetadata(true);
+ org.apache.hadoop.io.serial.lib.SerializationMetadata.internalForceInit();
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:org.apache.hadoop.io.serial.lib.TypedSerializationMetadata)
+ }
+
+ public static final class AvroMetadata extends
+ com.google.protobuf.GeneratedMessage {
+ // Use AvroMetadata.newBuilder() to construct.
+ private AvroMetadata() {
+ initFields();
+ }
+ private AvroMetadata(boolean noInit) {}
+
+ private static final AvroMetadata defaultInstance;
+ public static AvroMetadata getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public AvroMetadata getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.io.serial.lib.SerializationMetadata.internal_static_org_apache_hadoop_io_serial_lib_AvroMetadata_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.io.serial.lib.SerializationMetadata.internal_static_org_apache_hadoop_io_serial_lib_AvroMetadata_fieldAccessorTable;
+ }
+
+ public enum Kind
+ implements com.google.protobuf.ProtocolMessageEnum {
+ SPECIFIC(0, 1),
+ GENERIC(1, 2),
+ REFLECTION(2, 3),
+ ;
+
+
+ public final int getNumber() { return value; }
+
+ public static Kind valueOf(int value) {
+ switch (value) {
+ case 1: return SPECIFIC;
+ case 2: return GENERIC;
+ case 3: return REFLECTION;
+ default: return null;
+ }
+ }
+
+ public static com.google.protobuf.Internal.EnumLiteMap<Kind>
+ internalGetValueMap() {
+ return internalValueMap;
+ }
+ private static com.google.protobuf.Internal.EnumLiteMap<Kind>
+ internalValueMap =
+ new com.google.protobuf.Internal.EnumLiteMap<Kind>() {
+ public Kind findValueByNumber(int number) {
+ return Kind.valueOf(number);
+ }
+ };
+
+ public final com.google.protobuf.Descriptors.EnumValueDescriptor
+ getValueDescriptor() {
+ return getDescriptor().getValues().get(index);
+ }
+ public final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+ public static final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata.getDescriptor().getEnumTypes().get(0);
+ }
+
+ private static final Kind[] VALUES = {
+ SPECIFIC, GENERIC, REFLECTION,
+ };
+ public static Kind valueOf(
+ com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+ if (desc.getType() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "EnumValueDescriptor is not for this type.");
+ }
+ return VALUES[desc.getIndex()];
+ }
+ private final int index;
+ private final int value;
+ private Kind(int index, int value) {
+ this.index = index;
+ this.value = value;
+ }
+
+ static {
+ org.apache.hadoop.io.serial.lib.SerializationMetadata.getDescriptor();
+ }
+
+ // @@protoc_insertion_point(enum_scope:org.apache.hadoop.io.serial.lib.AvroMetadata.Kind)
+ }
+
+ // optional string schema = 1;
+ public static final int SCHEMA_FIELD_NUMBER = 1;
+ private boolean hasSchema;
+ private java.lang.String schema_ = "";
+ public boolean hasSchema() { return hasSchema; }
+ public java.lang.String getSchema() { return schema_; }
+
+ // optional .org.apache.hadoop.io.serial.lib.AvroMetadata.Kind kind = 2;
+ public static final int KIND_FIELD_NUMBER = 2;
+ private boolean hasKind;
+ private org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata.Kind kind_;
+ public boolean hasKind() { return hasKind; }
+ public org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata.Kind getKind() { return kind_; }
+
+ private void initFields() {
+ kind_ = org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata.Kind.SPECIFIC;
+ }
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (hasSchema()) {
+ output.writeString(1, getSchema());
+ }
+ if (hasKind()) {
+ output.writeEnum(2, getKind().getNumber());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (hasSchema()) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeStringSize(1, getSchema());
+ }
+ if (hasKind()) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeEnumSize(2, getKind().getNumber());
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ public static org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder> {
+ private org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata result;
+
+ // Construct using org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata.newBuilder()
+ private Builder() {}
+
+ private static Builder create() {
+ Builder builder = new Builder();
+ builder.result = new org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata();
+ return builder;
+ }
+
+ protected org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata internalGetResult() {
+ return result;
+ }
+
+ public Builder clear() {
+ if (result == null) {
+ throw new IllegalStateException(
+ "Cannot call clear() after build().");
+ }
+ result = new org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata();
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(result);
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata.getDescriptor();
+ }
+
+ public org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata getDefaultInstanceForType() {
+ return org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata.getDefaultInstance();
+ }
+
+ public boolean isInitialized() {
+ return result.isInitialized();
+ }
+ public org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata build() {
+ if (result != null && !isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return buildPartial();
+ }
+
+ private org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ if (!isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return buildPartial();
+ }
+
+ public org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata buildPartial() {
+ if (result == null) {
+ throw new IllegalStateException(
+ "build() has already been called on this Builder.");
+ }
+ org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata returnMe = result;
+ result = null;
+ return returnMe;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata) {
+ return mergeFrom((org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata other) {
+ if (other == org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata.getDefaultInstance()) return this;
+ if (other.hasSchema()) {
+ setSchema(other.getSchema());
+ }
+ if (other.hasKind()) {
+ setKind(other.getKind());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ setSchema(input.readString());
+ break;
+ }
+ case 16: {
+ int rawValue = input.readEnum();
+ org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata.Kind value = org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata.Kind.valueOf(rawValue);
+ if (value == null) {
+ unknownFields.mergeVarintField(2, rawValue);
+ } else {
+ setKind(value);
+ }
+ break;
+ }
+ }
+ }
+ }
+
+
+ // optional string schema = 1;
+ public boolean hasSchema() {
+ return result.hasSchema();
+ }
+ public java.lang.String getSchema() {
+ return result.getSchema();
+ }
+ public Builder setSchema(java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ result.hasSchema = true;
+ result.schema_ = value;
+ return this;
+ }
+ public Builder clearSchema() {
+ result.hasSchema = false;
+ result.schema_ = getDefaultInstance().getSchema();
+ return this;
+ }
+
+ // optional .org.apache.hadoop.io.serial.lib.AvroMetadata.Kind kind = 2;
+ public boolean hasKind() {
+ return result.hasKind();
+ }
+ public org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata.Kind getKind() {
+ return result.getKind();
+ }
+ public Builder setKind(org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata.Kind value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ result.hasKind = true;
+ result.kind_ = value;
+ return this;
+ }
+ public Builder clearKind() {
+ result.hasKind = false;
+ result.kind_ = org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata.Kind.SPECIFIC;
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:org.apache.hadoop.io.serial.lib.AvroMetadata)
+ }
+
+ static {
+ defaultInstance = new AvroMetadata(true);
+ org.apache.hadoop.io.serial.lib.SerializationMetadata.internalForceInit();
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:org.apache.hadoop.io.serial.lib.AvroMetadata)
+ }
+
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_org_apache_hadoop_io_serial_lib_TypedSerializationMetadata_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_org_apache_hadoop_io_serial_lib_TypedSerializationMetadata_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_org_apache_hadoop_io_serial_lib_AvroMetadata_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_org_apache_hadoop_io_serial_lib_AvroMetadata_fieldAccessorTable;
+
+ public static com.google.protobuf.Descriptors.FileDescriptor
+ getDescriptor() {
+ return descriptor;
+ }
+ private static com.google.protobuf.Descriptors.FileDescriptor
+ descriptor;
+ static {
+ java.lang.String[] descriptorData = {
+ "\n(src/protobuf/SerializationMetadata.pro" +
+ "to\022\037org.apache.hadoop.io.serial.lib\".\n\032T" +
+ "ypedSerializationMetadata\022\020\n\010typename\030\001 " +
+ "\001(\t\"\223\001\n\014AvroMetadata\022\016\n\006schema\030\001 \001(\t\022@\n\004" +
+ "kind\030\002 \001(\01622.org.apache.hadoop.io.serial" +
+ ".lib.AvroMetadata.Kind\"1\n\004Kind\022\014\n\010SPECIF" +
+ "IC\020\001\022\013\n\007GENERIC\020\002\022\016\n\nREFLECTION\020\003"
+ };
+ com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public com.google.protobuf.ExtensionRegistry assignDescriptors(
+ com.google.protobuf.Descriptors.FileDescriptor root) {
+ descriptor = root;
+ internal_static_org_apache_hadoop_io_serial_lib_TypedSerializationMetadata_descriptor =
+ getDescriptor().getMessageTypes().get(0);
+ internal_static_org_apache_hadoop_io_serial_lib_TypedSerializationMetadata_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_org_apache_hadoop_io_serial_lib_TypedSerializationMetadata_descriptor,
+ new java.lang.String[] { "Typename", },
+ org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata.class,
+ org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata.Builder.class);
+ internal_static_org_apache_hadoop_io_serial_lib_AvroMetadata_descriptor =
+ getDescriptor().getMessageTypes().get(1);
+ internal_static_org_apache_hadoop_io_serial_lib_AvroMetadata_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_org_apache_hadoop_io_serial_lib_AvroMetadata_descriptor,
+ new java.lang.String[] { "Schema", "Kind", },
+ org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata.class,
+ org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata.Builder.class);
+ return null;
+ }
+ };
+ com.google.protobuf.Descriptors.FileDescriptor
+ .internalBuildGeneratedFileFrom(descriptorData,
+ new com.google.protobuf.Descriptors.FileDescriptor[] {
+ }, assigner);
+ }
+
+ public static void internalForceInit() {}
+
+ // @@protoc_insertion_point(outer_class_scope)
+}
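
For readers skimming the generated plumbing above, the usable surface is
small: newBuilder()/setTypename()/build() construct a message and parseFrom()
reads one back. A minimal round-trip sketch; the type name is hypothetical,
and toByteArray() is the standard protobuf 2.x helper inherited from the
generated superclass:

import org.apache.hadoop.io.serial.lib.SerializationMetadata.TypedSerializationMetadata;

public class MetadataRoundTrip {
  public static void main(String[] args) throws Exception {
    TypedSerializationMetadata meta = TypedSerializationMetadata.newBuilder()
        .setTypename("org.apache.hadoop.io.Text")   // hypothetical type name
        .build();
    byte[] bytes = meta.toByteArray();              // assumed protobuf helper
    TypedSerializationMetadata copy =
        TypedSerializationMetadata.parseFrom(bytes);
    System.out.println(copy.getTypename());
  }
}
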
Added: hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/WritableSerialization.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/WritableSerialization.java?rev=1042107&view=auto
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/WritableSerialization.java (added)
+++ hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/WritableSerialization.java Sat Dec 4 07:13:10 2010
@@ -0,0 +1,93 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.io.serial.lib;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.serial.RawComparator;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.WritableComparator;
+import org.apache.hadoop.io.serial.TypedSerialization;
+import org.apache.hadoop.util.ReflectionUtils;
+
+/**
+ * A {@link TypedSerialization} for {@link Writable}s that delegates to
+ * {@link Writable#write} and {@link Writable#readFields}.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class WritableSerialization extends TypedSerialization<Writable> {
+
+ public WritableSerialization() {}
+
+ public WritableSerialization(Class<? extends Writable> specificType) {
+ super(specificType);
+ }
+
+ @Override
+ public Writable deserialize(InputStream stream,
+ Writable w,
+ Configuration conf) throws IOException {
+ Writable writable;
+ if (w == null) {
+ writable = (Writable) ReflectionUtils.newInstance(specificType, conf);
+ } else {
+ if (w.getClass() != specificType) {
+ throw new IllegalArgumentException("Type mismatch in deserialization: "+
+ "expected: " + specificType +
+ "; received " + w.getClass());
+ }
+ writable = w;
+ }
+ writable.readFields(ensureDataInput(stream));
+ return writable;
+ }
+
+ @Override
+ public void serialize(OutputStream out, Writable w) throws IOException {
+ if (specificType != w.getClass()) {
+ throw new IOException("Type mismatch in serialization: expected "
+ + specificType + "; received " + w.getClass());
+ }
+ w.write(ensureDataOutput(out));
+ }
+
+ @Override
+ @SuppressWarnings("unchecked")
+ public RawComparator getRawComparator() {
+ return (RawComparator) WritableComparator.get(
+ (Class<WritableComparable>) specificType);
+ }
+
+ @Override
+ public Class<Writable> getBaseType() {
+ return Writable.class;
+ }
+
+ @Override
+ public String getName() {
+ return "writable";
+ }
+}
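
A minimal usage sketch of the class above: round-trip a Text through
WritableSerialization. It leans on the ensureDataInput/ensureDataOutput
helpers from the TypedSerialization base class elsewhere in this branch:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.serial.lib.WritableSerialization;

public class WritableRoundTrip {
  public static void main(String[] args) throws Exception {
    WritableSerialization serial = new WritableSerialization(Text.class);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    serial.serialize(out, new Text("hello"));
    // passing null for the reuse argument creates a new instance reflectively
    Text copy = (Text) serial.deserialize(
        new ByteArrayInputStream(out.toByteArray()), null, new Configuration());
    System.out.println(copy);                       // hello
  }
}
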
Added: hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/avro/AvroComparator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/avro/AvroComparator.java?rev=1042107&view=auto
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/avro/AvroComparator.java (added)
+++ hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/avro/AvroComparator.java Sat Dec 4 07:13:10 2010
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.io.serial.lib.avro;
+
+import org.apache.avro.Schema;
+import org.apache.avro.io.BinaryData;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.io.serial.RawComparator;
+
+/**
+ * <p>
+ * A {@link RawComparator} that uses Avro to compare two serialized values
+ * directly, without explicitly deserializing them.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class AvroComparator implements RawComparator {
+
+ private final Schema schema;
+
+ public AvroComparator(final Schema s) {
+ this.schema = s;
+ }
+
+ public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+ return BinaryData.compare(b1, s1, b2, s2, schema);
+ }
+
+}
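
A hedged sketch of the comparator in action, using the AvroSerialization
class added later in this commit to produce bytes for a trivial "int" schema;
BinaryData.compare then orders the encodings without deserializing them:

import java.io.ByteArrayOutputStream;
import org.apache.avro.Schema;
import org.apache.hadoop.io.serial.lib.avro.AvroComparator;
import org.apache.hadoop.io.serial.lib.avro.AvroSerialization;

public class CompareInts {
  public static void main(String[] args) throws Exception {
    Schema ints = Schema.parse("\"int\"");
    AvroSerialization serial =
        new AvroSerialization(AvroSerialization.Kind.GENERIC).setSchema(ints);
    ByteArrayOutputStream a = new ByteArrayOutputStream();
    ByteArrayOutputStream b = new ByteArrayOutputStream();
    serial.serialize(a, 3);
    serial.serialize(b, 10);
    byte[] left = a.toByteArray();
    byte[] right = b.toByteArray();
    AvroComparator cmp = new AvroComparator(ints);
    // negative result: 3 sorts before 10 under Avro's binary ordering
    System.out.println(cmp.compare(left, 0, left.length, right, 0, right.length));
  }
}
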
Added: hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/avro/AvroReflectSerializable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/avro/AvroReflectSerializable.java?rev=1042107&view=auto
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/avro/AvroReflectSerializable.java (added)
+++ hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/avro/AvroReflectSerializable.java Sat Dec 4 07:13:10 2010
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.io.serial.lib.avro;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * Tag interface for Avro 'reflect' serializable classes. Classes implementing
+ * this interface can be serialized/deserialized using
+ * {@link AvroSerialization}.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public interface AvroReflectSerializable {
+
+}
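
Opting in is just a matter of implementing the tag interface; a sketch with a
hypothetical value class:

import org.apache.hadoop.io.serial.lib.avro.AvroReflectSerializable;

/** A hypothetical class that AvroSerialization will accept for reflection. */
public class Point implements AvroReflectSerializable {
  public int x;
  public int y;
}
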
Added: hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/avro/AvroSerialization.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/avro/AvroSerialization.java?rev=1042107&view=auto
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/avro/AvroSerialization.java (added)
+++ hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/avro/AvroSerialization.java Sat Dec 4 07:13:10 2010
@@ -0,0 +1,333 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.io.serial.lib.avro;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.io.Decoder;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.BinaryEncoder;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.reflect.ReflectData;
+import org.apache.avro.reflect.ReflectDatumReader;
+import org.apache.avro.reflect.ReflectDatumWriter;
+import org.apache.avro.specific.SpecificData;
+import org.apache.avro.specific.SpecificDatumReader;
+import org.apache.avro.specific.SpecificDatumWriter;
+import org.apache.avro.specific.SpecificRecord;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.serial.RawComparator;
+import org.apache.hadoop.io.serial.TypedSerialization;
+import org.apache.hadoop.io.serial.lib.SerializationMetadata.AvroMetadata;
+import org.apache.hadoop.util.ReflectionUtils;
+import org.yaml.snakeyaml.Yaml;
+
+/**
+ * A binding for Avro binary serialization. It handles the generic, specific,
+ * and reflection kinds of Java Avro serialization.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class AvroSerialization extends TypedSerialization<Object>
+ implements Configurable {
+ /**
+ * Key to configure packages that contain classes to be serialized and
+ * deserialized using this class. Multiple packages can be specified as a
+ * comma-separated list.
+ */
+ public static final String AVRO_REFLECT_PACKAGES = "avro.reflect.pkgs";
+
+ public static enum Kind {
+ GENERIC(AvroMetadata.Kind.GENERIC),
+ SPECIFIC(AvroMetadata.Kind.SPECIFIC),
+ REFLECTION(AvroMetadata.Kind.REFLECTION);
+
+ private static final EnumMap<AvroMetadata.Kind, Kind> translation =
+ new EnumMap<AvroMetadata.Kind,Kind>(AvroMetadata.Kind.class);
+ static {
+ for (Kind value: Kind.class.getEnumConstants()) {
+ translation.put(value.kind, value);
+ }
+ }
+
+ private AvroMetadata.Kind kind;
+ private Kind(AvroMetadata.Kind kind) {
+ this.kind = kind;
+ }
+
+ /**
+ * Get the serialized form of this enumeration.
+ * @return the serializable kind
+ */
+ @InterfaceAudience.Private
+ public AvroMetadata.Kind getMetadataKind() {
+ return kind;
+ }
+
+ /**
+ * Get the kind from the serialization enumeration.
+ * @param kind the serialization enumeration
+ * @return the internal kind
+ */
+ @InterfaceAudience.Private
+ public static Kind fromMetadataKind(AvroMetadata.Kind kind) {
+ return translation.get(kind);
+ }
+ }
+
+ private static final DecoderFactory decoderFactory =
+ DecoderFactory.defaultFactory();
+
+ private Configuration conf;
+ private Set<String> packages;
+ private Kind kind;
+ private Schema schema;
+ private DatumWriter<Object> writer;
+ private DatumReader<Object> reader;
+ private Encoder encoder;
+ private Decoder decoder;
+
+ private void setKind(Kind kind) {
+ this.kind = kind;
+ if (kind != null) {
+ switch (kind) {
+ case GENERIC:
+ writer = new GenericDatumWriter<Object>();
+ reader = new GenericDatumReader<Object>();
+ break;
+ case SPECIFIC:
+ writer = new SpecificDatumWriter<Object>();
+ reader = new SpecificDatumReader<Object>();
+ break;
+ case REFLECTION:
+ writer = new ReflectDatumWriter<Object>();
+ reader = new ReflectDatumReader<Object>();
+ break;
+ }
+ }
+ }
+
+ public AvroSerialization() {
+ this(null);
+ }
+
+ public AvroSerialization(Kind kind) {
+ setKind(kind);
+ encoder = new BinaryEncoder(null);
+ decoder = decoderFactory.createBinaryDecoder((InputStream) null, null);
+ }
+
+ /**
+ * Get the schema.
+ * @return the avro schema
+ */
+ public Schema getSchema() {
+ return schema;
+ }
+
+ /**
+ * Set the schema to the given value.
+ * @param schema the new schema
+ * @return returns this serialization so that you can use it like a builder
+ */
+ public AvroSerialization setSchema(Schema schema) {
+ this.schema = schema;
+ if (kind != null) {
+ writer.setSchema(schema);
+ reader.setSchema(schema);
+ }
+ return this;
+ }
+
+ @Override
+ public void serialize(OutputStream out, Object obj) throws IOException {
+ encoder.init(out);
+ writer.write(obj, encoder);
+ encoder.flush();
+ }
+
+ @Override
+ public Object deserialize(InputStream in, Object reuse, Configuration conf
+ ) throws IOException {
+ decoder.init(in);
+ Object result = reader.read(reuse, decoder);
+ // configure the object, if it wants to be
+ if (result != reuse) {
+ ReflectionUtils.setConf(result, conf);
+ }
+ return result;
+ }
+
+ /**
+ * Provides a raw comparator for Avro-encoded serialized data.
+ * @return a RawComparator parameterized for the specified Avro schema.
+ */
+ @Override
+ public RawComparator getRawComparator() {
+ return new AvroComparator(schema);
+ }
+
+ @Override
+ public AvroSerialization clone() {
+ AvroSerialization result = (AvroSerialization) super.clone();
+ result.setKind(kind);
+ result.setSchema(schema);
+ return result;
+ }
+
+ @Override
+ public void deserializeSelf(InputStream in,
+ Configuration conf) throws IOException {
+ AvroMetadata meta = AvroMetadata.parseFrom(in);
+ if (kind == null) {
+ setKind(Kind.fromMetadataKind(meta.getKind()));
+ }
+ setSchema(Schema.parse(meta.getSchema()));
+ }
+
+ @Override
+ public void serializeSelf(OutputStream out) throws IOException {
+ AvroMetadata.newBuilder().setKind(kind.kind).setSchema(schema.toString()).
+ build().writeTo(out);
+ }
+
+ private static final String KIND_ATTRIBUTE = "kind";
+ private static final String SCHEMA_ATTRIBUTE = "schema";
+
+ @SuppressWarnings("unchecked")
+ @Override
+ public void fromString(String meta) throws IOException {
+ Yaml yaml = new Yaml();
+ Map<String, String> map = (Map<String,String>) yaml.load(meta);
+ String value = map.get(KIND_ATTRIBUTE);
+ if (kind == null && value != null) {
+ setKind(Kind.valueOf(value));
+ }
+ value = map.get(SCHEMA_ATTRIBUTE);
+ setSchema(Schema.parse(value));
+ }
+
+ public String toString() {
+ Yaml yaml = new Yaml();
+ Map<String,String> map = new HashMap<String,String>();
+ if (kind != null) {
+ map.put(KIND_ATTRIBUTE, kind.toString());
+ }
+ map.put(SCHEMA_ATTRIBUTE, schema.toString());
+ return yaml.dump(map);
+ }
+
+ private boolean isReflection(Class<?> cls) {
+ return AvroReflectSerializable.class.isAssignableFrom(cls) ||
+ getPackages().contains(cls.getPackage().getName());
+ }
+
+ private Set<String> getPackages() {
+ if (packages == null) {
+ String[] pkgList = conf.getStrings(AVRO_REFLECT_PACKAGES);
+ packages = new HashSet<String>();
+ if (pkgList != null) {
+ for (String pkg : pkgList) {
+ packages.add(pkg.trim());
+ }
+ }
+ }
+ return packages;
+ }
+
+ private boolean isSpecific(Class<?> cls) {
+ return SpecificRecord.class.isAssignableFrom(cls);
+ }
+
+ @Override
+ public boolean accept(Class<?> cls) {
+ return isSpecific(cls) || isReflection(cls);
+ }
+
+ @Override
+ public void setSpecificType(Class<? extends Object> cls) {
+ super.setSpecificType(cls);
+ if (isSpecific(cls)) {
+ setKind(Kind.SPECIFIC);
+ setSchema(SpecificData.get().getSchema(cls));
+ } else if (isReflection(cls)) {
+ setKind(Kind.REFLECTION);
+ setSchema(ReflectData.get().getSchema(cls));
+ } else {
+ throw new IllegalArgumentException("class " + cls.getName() +
+ " can't infer schema.");
+ }
+ }
+
+ @Override
+ public Class<Object> getBaseType() {
+ // Unlike most of the typed serializations, we don't have a
+ // single base type and the work has to be done in a special accept method.
+ return Object.class;
+ }
+
+ @Override
+ public String getName() {
+ return "avro";
+ }
+
+ @Override
+ public Configuration getConf() {
+ return conf;
+ }
+
+ @Override
+ public void setConf(Configuration conf) {
+ if (conf != this.conf) {
+ this.conf = conf;
+ // clear the cache of packages
+ packages = null;
+ }
+ }
+
+ public boolean equals(Object right) {
+ if (this == right) {
+ return true;
+ } else if (right == null || right.getClass() != getClass()) {
+ return false;
+ } else {
+ AvroSerialization rightTyped = (AvroSerialization) right;
+ return rightTyped.kind == kind && rightTyped.schema.equals(schema);
+ }
+ }
+
+ public int hashCode() {
+ return schema.hashCode() * 5 + kind.hashCode();
+ }
+}
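
The YAML metadata path above round-trips cleanly; a small sketch of
toString()/fromString() restoring both the kind and the schema:

import org.apache.avro.Schema;
import org.apache.hadoop.io.serial.lib.avro.AvroSerialization;
import org.apache.hadoop.io.serial.lib.avro.AvroSerialization.Kind;

public class MetadataYaml {
  public static void main(String[] args) throws Exception {
    AvroSerialization serial =
        new AvroSerialization(Kind.GENERIC).setSchema(Schema.parse("\"string\""));
    String meta = serial.toString();        // YAML map with "kind" and "schema"
    AvroSerialization copy = new AvroSerialization();
    copy.fromString(meta);                  // restores both attributes
    System.out.println(copy.equals(serial));   // true
  }
}
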
Added: hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/avro/package.html
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/avro/package.html?rev=1042107&view=auto
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/avro/package.html (added)
+++ hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/avro/package.html Sat Dec 4 07:13:10 2010
@@ -0,0 +1,43 @@
+<html>
+
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<body>
+
+<p>
+This package provides Avro serialization for Hadoop. It can be used to
+serialize and deserialize Avro types.
+</p>
+
+<p>
+Use {@link org.apache.hadoop.io.serial.lib.avro.AvroSerialization} with the
+SPECIFIC kind for serialization of classes generated by Avro's 'specific'
+compiler.
+</p>
+
+<p>
+Use {@link org.apache.hadoop.io.serial.lib.avro.AvroSerialization} with the
+REFLECTION kind for other classes.
+Reflection serialization works for any class that is either in the package
+list configured via
+{@link org.apache.hadoop.io.serial.lib.avro.AvroSerialization#AVRO_REFLECT_PACKAGES}
+or implements the
+{@link org.apache.hadoop.io.serial.lib.avro.AvroReflectSerializable}
+interface.
+</p>
+
+</body>
+</html>
Added: hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/protobuf/ProtoBufComparator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/protobuf/ProtoBufComparator.java?rev=1042107&view=auto
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/protobuf/ProtoBufComparator.java (added)
+++ hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/protobuf/ProtoBufComparator.java Sat Dec 4 07:13:10 2010
@@ -0,0 +1,490 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.serial.lib.protobuf;
+
+import java.io.IOException;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.io.serial.RawComparator;
+
+import com.google.protobuf.ByteString;
+import com.google.protobuf.CodedInputStream;
+import com.google.protobuf.Descriptors.Descriptor;
+import com.google.protobuf.Descriptors.EnumValueDescriptor;
+import com.google.protobuf.Descriptors.FieldDescriptor;
+import com.google.protobuf.Descriptors.FieldDescriptor.Type;
+import com.google.protobuf.Message;
+
+public class ProtoBufComparator implements RawComparator {
+ static final int WIRETYPE_VARINT = 0;
+ static final int WIRETYPE_FIXED64 = 1;
+ static final int WIRETYPE_LENGTH_DELIMITED = 2;
+ static final int WIRETYPE_START_GROUP = 3;
+ static final int WIRETYPE_END_GROUP = 4;
+ static final int WIRETYPE_FIXED32 = 5;
+
+ static final int TAG_TYPE_BITS = 3;
+ static final int TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1;
+
+ private final Map<Descriptor, List<FieldDescriptor>> fieldCache =
+ new HashMap<Descriptor, List<FieldDescriptor>>();
+ private final List<FieldDescriptor> topFields;
+
+ /**
+ * Create a comparator that will compare serialized messages of a particular
+ * class.
+ * @param cls the specific class to compare
+ */
+ public ProtoBufComparator(Class<? extends Message> cls) {
+ if (!Message.class.isAssignableFrom(cls)) {
+ throw new IllegalArgumentException("Type " + cls +
+ "must be a generated protobuf class");
+ }
+ try {
+ Method getDescriptor = cls.getDeclaredMethod("getDescriptor");
+ topFields = addToCache((Descriptor) getDescriptor.invoke(null));
+ } catch (Exception e) {
+ throw new IllegalArgumentException("Can't get descriptors for " + cls, e);
+ }
+ }
+
+ /**
+ * Define a comparator so that we can sort the fields by their field ids.
+ */
+ private static class FieldIdComparator implements Comparator<FieldDescriptor>{
+
+ @Override
+ public int compare(FieldDescriptor left, FieldDescriptor right) {
+ int leftId = left.getNumber();
+ int rightId = right.getNumber();
+ if (leftId == rightId) {
+ return 0;
+ } else {
+ return leftId < rightId ? -1 : 1;
+ }
+ }
+ }
+
+ private static final FieldIdComparator FIELD_COMPARE = new FieldIdComparator();
+
+ /**
+ * Add all of the types that are recursively nested under the given one
+ * to the cache. The fields are sorted by field id.
+ * @param type the type to add
+ * @return the list of sorted fields for the given type
+ */
+ private List<FieldDescriptor> addToCache(Descriptor type) {
+ List<FieldDescriptor> fields =
+ new ArrayList<FieldDescriptor>(type.getFields());
+ Collections.sort(fields, FIELD_COMPARE);
+ fieldCache.put(type, fields);
+ for(Descriptor subMessage: type.getNestedTypes()) {
+ if (!fieldCache.containsKey(subMessage)) {
+ addToCache(subMessage);
+ }
+ }
+ return fields;
+ }
+
+ /**
+ * Compare two serialized protocol buffers using the natural sort order.
+ * @param b1 the left serialized value
+ * @param s1 the first byte index in b1 to compare
+ * @param l1 the number of bytes in b1 to compare
+ * @param b2 the right serialized value
+ * @param s2 the first byte index in b2 to compare
+ * @param l2 the number of bytes in b2 to compare
+ */
+ @Override
+ public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+ CodedInputStream left = CodedInputStream.newInstance(b1,s1,l1);
+ CodedInputStream right = CodedInputStream.newInstance(b2, s2, l2);
+ try {
+ return compareMessage(left, right, topFields);
+ } catch (IOException ie) {
+ throw new IllegalArgumentException("problem running compare", ie);
+ }
+ }
+
+ /**
+ * Advance the stream to the given fieldId or one that is larger.
+ * @param stream the stream to read
+ * @param currentTag the last tag that was read from this stream
+ * @param fieldId the id of the field we are looking for
+ * @return the last tag that was read or 0 for end of stream
+ * @throws IOException
+ */
+ private int advanceTo(CodedInputStream stream,
+ int currentTag,
+ int fieldId) throws IOException {
+ int goal = fieldId << TAG_TYPE_BITS;
+ // if we've already seen the target field (or a later one), return its tag;
+ // >= matters here: a varint-encoded target field has wiretype 0, so its
+ // tag equals goal exactly and must not be skipped
+ if (currentTag >= goal) {
+ return currentTag;
+ }
+ while (!stream.isAtEnd()) {
+ currentTag = stream.readTag();
+ if (currentTag >= goal) {
+ return currentTag;
+ } else {
+ stream.skipField(currentTag);
+ }
+ }
+ return 0;
+ }
+
+ /**
+ * Check compatibility between the logical type in the schema and the
+ * wiretype. Incompatible fields are ignored.
+ * @param tag the tag (id and wiretype) of the field
+ * @param type the logical type of the field
+ * @return true if we should use this field for comparing
+ */
+ private boolean isCompatible(int tag, Type type) {
+ int wiretype = tag & TAG_TYPE_MASK;
+ switch (type) {
+ case BOOL:
+ case ENUM:
+ case INT32:
+ case INT64:
+ case SINT32:
+ case SINT64:
+ case UINT32:
+ case UINT64:
+ return wiretype == WIRETYPE_VARINT ||
+ wiretype == WIRETYPE_LENGTH_DELIMITED;
+
+ case BYTES:
+ case MESSAGE:
+ case STRING:
+ return wiretype == WIRETYPE_LENGTH_DELIMITED;
+
+ case FLOAT:
+ case FIXED32:
+ case SFIXED32:
+ return wiretype == WIRETYPE_LENGTH_DELIMITED ||
+ wiretype == WIRETYPE_FIXED32;
+
+ case DOUBLE:
+ case SFIXED64:
+ case FIXED64:
+ return wiretype == WIRETYPE_LENGTH_DELIMITED ||
+ wiretype == WIRETYPE_FIXED64;
+
+ case GROUP:
+ // don't bother dealing with groups, since they aren't used outside of
+ // the protobuf mothership.
+ return false;
+
+ default:
+ throw new IllegalArgumentException("Unknown field type " + type);
+ }
+ }
+
+ /**
+ * Compare two serialized messages of the same type.
+ * @param left the left message
+ * @param right the right message
+ * @param fields the fields of the message
+ * @return -1, 0, or 1 if left is less, equal or greater than right
+ * @throws IOException
+ */
+ private int compareMessage(CodedInputStream left, CodedInputStream right,
+ List<FieldDescriptor> fields
+ ) throws IOException {
+ int leftTag = 0;
+ int rightTag = 0;
+ for(FieldDescriptor field: fields) {
+ int fieldId = field.getNumber();
+ Type fieldType = field.getType();
+ int wireFormat = 0;
+ leftTag = advanceTo(left, leftTag, fieldId);
+ rightTag = advanceTo(right, rightTag, fieldId);
+ boolean leftDefault = (leftTag >>> TAG_TYPE_BITS) != fieldId;
+ boolean rightDefault = (rightTag >>> TAG_TYPE_BITS) != fieldId;
+ // if the fieldType and wiretypes aren't compatible, skip field
+ if (!leftDefault && !isCompatible(leftTag, fieldType)) {
+ leftDefault = true;
+ left.skipField(leftTag);
+ }
+ if (!rightDefault && !isCompatible(rightTag, fieldType)) {
+ rightDefault = true;
+ right.skipField(rightTag);
+ }
+ if (!leftDefault) {
+ wireFormat = leftTag & TAG_TYPE_MASK;
+ // ensure both sides use the same representation
+ if (!rightDefault && leftTag != rightTag) {
+ return leftTag < rightTag ? -1 : 1;
+ }
+ } else if (rightDefault) {
+ continue;
+ }
+ int result;
+ switch (wireFormat) {
+ case WIRETYPE_LENGTH_DELIMITED:
+ switch (fieldType) {
+ case STRING:
+ String leftStr =
+ leftDefault ? (String) field.getDefaultValue() : left.readString();
+ String rightStr =
+ rightDefault ? (String) field.getDefaultValue() :right.readString();
+ result = leftStr.compareTo(rightStr);
+ if (result != 0) {
+ return result;
+ }
+ break;
+ case BYTES:
+ result = compareBytes(leftDefault ?
+ (ByteString) field.getDefaultValue() :
+ left.readBytes(),
+ rightDefault ?
+ (ByteString) field.getDefaultValue() :
+ right.readBytes());
+ if (result != 0) {
+ return result;
+ }
+ break;
+ default:
+ // handle nested messages and packed fields
+ if (leftDefault) {
+ return -1;
+ } else if (rightDefault) {
+ return 1;
+ }
+ int leftLimit = left.readRawVarint32();
+ int rightLimit = right.readRawVarint32();
+ int oldLeft = left.pushLimit(leftLimit);
+ int oldRight = right.pushLimit(rightLimit);
+ while (left.getBytesUntilLimit() > 0 &&
+ right.getBytesUntilLimit() > 0) {
+ result = compareField(field, left, right, false, false);
+ if (result != 0) {
+ return result;
+ }
+ }
+ if (right.getBytesUntilLimit() > 0) {
+ return -1;
+ } else if (left.getBytesUntilLimit() > 0) {
+ return 1;
+ }
+ left.popLimit(oldLeft);
+ right.popLimit(oldRight);
+ }
+ break;
+ case WIRETYPE_VARINT:
+ case WIRETYPE_FIXED32:
+ case WIRETYPE_FIXED64:
+ result = compareField(field, left, right, leftDefault, rightDefault);
+ if (result != 0) {
+ return result;
+ }
+ break;
+ default:
+ throw new IllegalArgumentException("Unknown field encoding " +
+ wireFormat);
+ }
+ }
+ return 0;
+ }
+
+ /**
+ * Compare a single field inside of a message. This is used for both packed
+ * and unpacked fields. It assumes the wire type has already been checked.
+ * @param field the type of the field that we are comparing
+ * @param left the left value
+ * @param right the right value
+ * @param leftDefault use the default value instead of the left value
+ * @param rightDefault use the default value instead of the right value
+ * @return -1, 0, 1 depending on whether left is less, equal or greater than
+ * right
+ * @throws IOException
+ */
+ private int compareField(FieldDescriptor field,
+ CodedInputStream left,
+ CodedInputStream right,
+ boolean leftDefault,
+ boolean rightDefault) throws IOException {
+ switch (field.getType()) {
+ case BOOL:
+ boolean leftBool = leftDefault ?
+ (Boolean) field.getDefaultValue() : left.readBool();
+ boolean rightBool = rightDefault ?
+ (Boolean) field.getDefaultValue() : right.readBool();
+ if (leftBool == rightBool) {
+ return 0;
+ } else {
+ return rightBool ? -1 : 1;
+ }
+ case DOUBLE:
+ return Double.compare(
+ leftDefault ? (Double) field.getDefaultValue() : left.readDouble(),
+ rightDefault ? (Double) field.getDefaultValue() : right.readDouble());
+ case ENUM:
+ return compareInt32(leftDefault ? intDefault(field) : left.readEnum(),
+ rightDefault ? intDefault(field) : right.readEnum());
+ case FIXED32:
+ return compareUInt32(leftDefault ? intDefault(field) : left.readFixed32(),
+ rightDefault ? intDefault(field) : right.readFixed32());
+ case FIXED64:
+ return compareUInt64(leftDefault ? longDefault(field) : left.readFixed64(),
+ rightDefault ? longDefault(field) : right.readFixed64());
+ case FLOAT:
+ return Float.compare(
+ leftDefault ? (Float) field.getDefaultValue() : left.readFloat(),
+ rightDefault ? (Float) field.getDefaultValue() : right.readFloat());
+ case INT32:
+ return compareInt32(leftDefault ? intDefault(field) : left.readInt32(),
+ rightDefault ? intDefault(field) : right.readInt32());
+ case INT64:
+ return compareInt64(leftDefault ? longDefault(field) : left.readInt64(),
+ rightDefault ? longDefault(field) : right.readInt64());
+ case MESSAGE:
+ return compareMessage(left, right,
+ fieldCache.get(field.getMessageType()));
+ case SFIXED32:
+ return compareInt32(leftDefault ? intDefault(field) : left.readSFixed32(),
+ rightDefault ? intDefault(field) : right.readSFixed32());
+ case SFIXED64:
+ return compareInt64(leftDefault ? longDefault(field) : left.readSFixed64(),
+ rightDefault ? longDefault(field) : right.readSFixed64());
+ case SINT32:
+ return compareInt32(leftDefault ? intDefault(field) : left.readSInt32(),
+ rightDefault ? intDefault(field) : right.readSInt32());
+ case SINT64:
+ return compareInt64(leftDefault ? longDefault(field) : left.readSInt64(),
+ rightDefault ? longDefault(field) : right.readSInt64());
+ case UINT32:
+ return compareUInt32(leftDefault ? intDefault(field) : left.readUInt32(),
+ rightDefault ? intDefault(field) : right.readUInt32());
+ case UINT64:
+ return compareUInt64(leftDefault ? longDefault(field) : left.readUInt64(),
+ rightDefault ? longDefault(field) : right.readUInt64());
+ default:
+ throw new IllegalArgumentException("unknown field type " + field);
+ }
+ }
+
+ /**
+ * Compare 32 bit signed integers.
+ * @param left
+ * @param right
+ * @return -1, 0, or 1 if left is less than, equal to, or greater than right
+ */
+ private static int compareInt32(int left, int right) {
+ if (left == right) {
+ return 0;
+ } else {
+ return left < right ? -1 : 1;
+ }
+ }
+
+ /**
+ * Compare 64 bit signed integers.
+ * @param left
+ * @param right
+ * @return -1, 0, or 1 if left is less than, equal to, or greater than right
+ */
+ private static int compareInt64(long left, long right) {
+ if (left == right) {
+ return 0;
+ } else {
+ return left < right ? -1 : 1;
+ }
+ }
+
+ /**
+ * Compare 32 bit logically unsigned integers.
+ * @param left
+ * @param right
+ * @return -1, 0, or 1 if left is less than, equal to, or greater than right
+ */
+ private static int compareUInt32(int left, int right) {
+ if (left == right) {
+ return 0;
+ } else {
+ return left + Integer.MIN_VALUE < right + Integer.MIN_VALUE ? -1 : 1;
+ }
+ }
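+
+ // Worked example of the bias above: adding Integer.MIN_VALUE flips the
+ // sign bit, which maps the unsigned ordering onto the signed ordering.
+ // E.g. left = 0xFFFFFFFF (unsigned max, signed -1) and right = 1:
+ // -1 + MIN_VALUE == Integer.MAX_VALUE while 1 + MIN_VALUE == MIN_VALUE + 1,
+ // so the biased test correctly ranks 0xFFFFFFFF above 1.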
+
+ /**
+ * Compare two byte strings using memcmp semantics
+ * @param left
+ * @param right
+ * @return -1, 0, 1 if left is less, equal, or greater than right
+ */
+ private static int compareBytes(ByteString left, ByteString right) {
+ int size = Math.min(left.size(), right.size());
+ for(int i = 0; i < size; ++i) {
+ int leftByte = left.byteAt(i) & 0xff;
+ int rightByte = right.byteAt(i) & 0xff;
+ if (leftByte != rightByte) {
+ return leftByte < rightByte ? -1 : 1;
+ }
+ }
+ if (left.size() != right.size()) {
+ return left.size() < right.size() ? -1 : 1;
+ }
+ return 0;
+ }
+
+ /**
+ * Compare 64 bit logically unsigned integers.
+ * @param left
+ * @param right
+ * @return -1, 0, or 1 if left is less than, equal to, or greater than right
+ */
+ private static int compareUInt64(long left, long right) {
+ if (left == right) {
+ return 0;
+ } else {
+ return left + Long.MIN_VALUE < right + Long.MIN_VALUE ? -1 : 1;
+ }
+ }
+
+ /**
+ * Get the integer default, including dereferencing enum values.
+ * @param field the field
+ * @return the integer default value
+ */
+ private static int intDefault(FieldDescriptor field) {
+ if (field.getType() == Type.ENUM) {
+ return ((EnumValueDescriptor) field.getDefaultValue()).getNumber();
+ } else {
+ return (Integer) field.getDefaultValue();
+ }
+ }
+
+ /**
+ * Get the long default value for the given field.
+ * @param field the field
+ * @return the default value
+ */
+ private static long longDefault(FieldDescriptor field) {
+ return (Long) field.getDefaultValue();
+ }
+}
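
Taken together, the methods above let the comparator order two serialized
messages directly, without deserializing either one. A hedged usage sketch:
MyRecord is a hypothetical protoc-generated class, and the byte-range compare
signature is assumed to follow the usual Hadoop RawComparator shape.

  byte[] a = MyRecord.newBuilder().setId(1).build().toByteArray();
  byte[] b = MyRecord.newBuilder().setId(2).build().toByteArray();
  ProtoBufComparator cmp = new ProtoBufComparator(MyRecord.class);
  int order = cmp.compare(a, 0, a.length, b, 0, b.length); // expected: < 0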
Added: hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/protobuf/ProtoBufSerialization.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/protobuf/ProtoBufSerialization.java?rev=1042107&view=auto
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/protobuf/ProtoBufSerialization.java (added)
+++ hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/protobuf/ProtoBufSerialization.java Sat Dec 4 07:13:10 2010
@@ -0,0 +1,114 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.io.serial.lib.protobuf;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+
+import com.google.protobuf.Message;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.serial.RawComparator;
+import org.apache.hadoop.io.serial.TypedSerialization;
+
+/**
+ * A binding for Protocol Buffer serialization.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class ProtoBufSerialization extends TypedSerialization<Message> {
+ private Method builderFactory;
+
+ public ProtoBufSerialization() {}
+ public ProtoBufSerialization(Class<? extends Message> cls) {
+ super(cls);
+ setBuilderFactory(cls);
+ }
+
+ @Override
+ public ProtoBufSerialization clone() {
+ ProtoBufSerialization result = (ProtoBufSerialization) super.clone();
+ result.builderFactory = builderFactory;
+ return result;
+ }
+
+ @Override
+ public Class<Message> getBaseType() {
+ return Message.class;
+ }
+
+ private void setBuilderFactory(Class<? extends Message> cls) {
+ if (cls == null) {
+ builderFactory = null;
+ } else {
+ try {
+ builderFactory = cls.getDeclaredMethod("parseFrom",
+ InputStream.class);
+ } catch (NoSuchMethodException nsme) {
+ throw new IllegalArgumentException("Can't find parseFrom in " +
+ cls.getName());
+ }
+ }
+ }
+
+ @Override
+ public void setSpecificType(Class<? extends Message> cls) {
+ super.setSpecificType(cls);
+ setBuilderFactory(cls);
+ }
+
+ @Override
+ public Message deserialize(InputStream stream, Message reusableObject,
+ Configuration conf) throws IOException {
+ try {
+ return (Message) builderFactory.invoke(null, stream);
+ } catch (IllegalAccessException e) {
+ throw new IllegalArgumentException("can't access parseFrom on " +
+ getSpecificType().getName(), e);
+ } catch (InvocationTargetException e) {
+ throw new IllegalArgumentException("can't invoke parseFrom on " +
+ getSpecificType().getName(), e);
+ }
+ }
+
+ @Override
+ public RawComparator getRawComparator() {
+ return new ProtoBufComparator(getSpecificType());
+ }
+
+ @Override
+ public void serialize(OutputStream stream,
+ Message object) throws IOException {
+ if (specificType != object.getClass()) {
+ throw new IOException("Type mismatch in serialization: expected "
+ + specificType + "; received " + object.getClass());
+ }
+ object.writeTo(stream);
+ }
+
+ @Override
+ public String getName() {
+ return "protobuf";
+ }
+}
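
The binding above locates the generated parseFrom(InputStream) factory method
reflectively and streams messages directly. A minimal usage sketch, where
MyRecord and its setName field are placeholders for any protoc-generated
message class:

  ProtoBufSerialization serial = new ProtoBufSerialization(MyRecord.class);
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  serial.serialize(out, MyRecord.newBuilder().setName("example").build());
  Message copy = serial.deserialize(
      new ByteArrayInputStream(out.toByteArray()), null, new Configuration());

Note that serialize rejects any object whose class differs from the configured
specific type, so the serialization must be constructed (or reconfigured via
setSpecificType) for the exact message class in use.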
Added: hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/thrift/StreamTransport.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/thrift/StreamTransport.java?rev=1042107&view=auto
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/thrift/StreamTransport.java (added)
+++ hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/thrift/StreamTransport.java Sat Dec 4 07:13:10 2010
@@ -0,0 +1,88 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.io.serial.lib.thrift;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+
+import org.apache.thrift.transport.TTransport;
+import org.apache.thrift.transport.TTransportException;
+
+/**
+ * Define a Thrift transport whose input and output streams can be changed
+ * dynamically; otherwise we would need to recreate the encoder and decoder
+ * for each object.
+ */
+class StreamTransport extends TTransport {
+ private InputStream in = null;
+ private OutputStream out = null;
+
+ void open(InputStream in) {
+ if (this.in != null || this.out != null) {
+ throw new IllegalStateException("opening an open transport");
+ }
+ this.in = in;
+ }
+
+ void open(OutputStream out) {
+ if (this.in != null || this.out != null) {
+ throw new IllegalStateException("opening an open transport");
+ }
+ this.out = out;
+ }
+
+ @Override
+ public void close() {
+ if (in != null) {
+ in = null;
+ } else if (out != null) {
+ out = null;
+ }
+ }
+
+ @Override
+ public boolean isOpen() {
+ return in != null || out != null;
+ }
+
+ @Override
+ public void open() {
+ // NOTHING
+ }
+
+ @Override
+ public int read(byte[] buf, int off, int len) throws TTransportException {
+ try {
+ return in.read(buf, off, len);
+ } catch (IOException ie) {
+ throw new TTransportException("problem reading stream", ie);
+ }
+ }
+
+ @Override
+ public void write(byte[] buf, int off, int len) throws TTransportException {
+ try {
+ out.write(buf, off, len);
+ } catch (IOException ie) {
+ throw new TTransportException("problem writing stream", ie);
+ }
+ }
+
+}
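
The transport's life cycle is one stream per object: bind a stream, encode or
decode a single object, then unbind. A sketch of the pattern (the
ThriftSerialization below follows it exactly; out stands for any OutputStream):

  StreamTransport transport = new StreamTransport();
  TProtocol protocol = new TCompactProtocol(transport);
  transport.open(out);     // bind a stream for one object
  // obj.write(protocol);  // encode a single thrift object
  transport.close();       // unbind, leaving the transport reusable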
Added: hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/thrift/ThriftSerialization.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/thrift/ThriftSerialization.java?rev=1042107&view=auto
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/thrift/ThriftSerialization.java (added)
+++ hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/lib/thrift/ThriftSerialization.java Sat Dec 4 07:13:10 2010
@@ -0,0 +1,110 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.serial.lib.thrift;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.serial.RawComparator;
+import org.apache.hadoop.io.serial.TypedSerialization;
+import org.apache.hadoop.io.serial.lib.DeserializationRawComparator;
+import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.thrift.TBase;
+import org.apache.thrift.TException;
+import org.apache.thrift.protocol.TCompactProtocol;
+import org.apache.thrift.protocol.TProtocol;
+
+/**
+ * Serialize using the compact Thrift representation.
+ */
+@SuppressWarnings("unchecked")
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class ThriftSerialization extends TypedSerialization<TBase> {
+ private final TProtocol protocol;
+ private final StreamTransport transport;
+
+ public ThriftSerialization() {
+ transport = new StreamTransport();
+ protocol = new TCompactProtocol(transport);
+ }
+
+ public ThriftSerialization(Class<? extends TBase> cls) {
+ this();
+ setSpecificType(cls);
+ }
+
+ @Override
+ public Class<TBase> getBaseType() {
+ return TBase.class;
+ }
+
+ @Override
+ public TBase deserialize(InputStream stream, TBase reusableObject,
+ Configuration conf) throws IOException {
+ transport.open(stream);
+ TBase result = reusableObject;
+ if (result == null) {
+ result = ReflectionUtils.newInstance(getSpecificType(), conf);
+ } else {
+ if (specificType != result.getClass()) {
+ throw new IOException("Type mismatch in deserialization: expected "
+ + specificType + "; received " + result.getClass());
+ }
+ }
+ try {
+ result.read(protocol);
+ } catch (TException te) {
+ throw new IOException("problem reading thrift object", te);
+ } finally {
+ transport.close();
+ }
+ return result;
+ }
+
+ @Override
+ public RawComparator getRawComparator() {
+ return new DeserializationRawComparator(this, null);
+ }
+
+ @Override
+ public void serialize(OutputStream stream, TBase object) throws IOException {
+ if (specificType != object.getClass()) {
+ throw new IOException("Type mismatch in serialization: expected "
+ + specificType + "; received " + object.getClass());
+ }
+
+ transport.open(stream);
+ try {
+ object.write(protocol);
+ } catch (TException te) {
+ throw new IOException("problem writing thrift object", te);
+ } finally {
+ transport.close();
+ }
+ }
+
+ @Override
+ public String getName() {
+ return "thrift";
+ }
+}
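
Unlike the protobuf binding, deserialize here can populate a caller-supplied
object. A hedged sketch, with MyThriftRecord standing in for any
thrift-generated TBase class and in for any InputStream:

  ThriftSerialization serial = new ThriftSerialization(MyThriftRecord.class);
  MyThriftRecord reusable = new MyThriftRecord();
  // reusing one instance avoids an allocation per record read
  TBase result = serial.deserialize(in, reusable, new Configuration());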
Added: hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/package-info.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/package-info.java?rev=1042107&view=auto
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/package-info.java (added)
+++ hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serial/package-info.java Sat Dec 4 07:13:10 2010
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * This package provides a generic interface to multiple serialization
+ * frameworks. The property "hadoop.serializations" defines a list of
+ * {@link org.apache.hadoop.io.serial.Serialization} objects. Each
+ * serialization has a name and associated metadata, which is interpreted by
+ * that serialization.
+ * <p>
+ * The system is pluggable, but the currently supported frameworks are:
+ * <ul>
+ * <li> Writable - the traditional Hadoop serialization
+ * <li> Protocol Buffers
+ * <li> Thrift
+ * <li> Avro
+ * <li> Java serialization - not recommended for real workloads
+ * </ul>
+ *
+ * The {@link org.apache.hadoop.io.serial.SerializationFactory} provides
+ * accessors for finding Serializations either by name or by type.
+ * Serializations associated with a set of types extend
+ * {@link org.apache.hadoop.io.serial.TypedSerialization} and can determine
+ * whether they can accept a given type. They are the default serialization
+ * for the types they accept.
+ * <p>
+ *
+ * To add a new serialization framework write an implementation of
+ * Serialization and add its name to the "hadoop.serializations" property.
+ *
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+package org.apache.hadoop.io.serial;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
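
To make the registration step concrete, here is a sketch of wiring a new
framework into the property named above (com.example.CustomSerialization is
illustrative, and any entries for the built-in frameworks would need to be
kept alongside it):

  Configuration conf = new Configuration();
  conf.setStrings("hadoop.serializations",
      "com.example.CustomSerialization"); // hypothetical implementation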
Modified: hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/Deserializer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/Deserializer.java?rev=1042107&r1=1042106&r2=1042107&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/Deserializer.java (original)
+++ hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/Deserializer.java Sat Dec 4 07:13:10 2010
@@ -36,9 +36,12 @@ import org.apache.hadoop.classification.
* {@link #deserialize(Object)}.
* </p>
* @param <T>
+ * @deprecated Use {@link org.apache.hadoop.io.serial.Serialization}
+ * instead.
*/
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
@InterfaceStability.Evolving
+@Deprecated
public interface Deserializer<T> {
/**
* <p>Prepare the deserializer for reading.</p>
Modified: hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/DeserializerComparator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/DeserializerComparator.java?rev=1042107&r1=1042106&r2=1042107&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/DeserializerComparator.java (original)
+++ hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/DeserializerComparator.java Sat Dec 4 07:13:10 2010
@@ -38,9 +38,13 @@ import org.apache.hadoop.io.RawComparato
* on byte representations.
* </p>
* @param <T>
+ * @deprecated Use
+ * {@link org.apache.hadoop.io.serial.lib.DeserializationRawComparator}
+ * instead.
*/
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
@InterfaceStability.Evolving
+@Deprecated
public abstract class DeserializerComparator<T> implements RawComparator<T> {
private InputBuffer buffer = new InputBuffer();
Modified: hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/JavaSerialization.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/JavaSerialization.java?rev=1042107&r1=1042106&r2=1042107&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/JavaSerialization.java (original)
+++ hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/JavaSerialization.java Sat Dec 4 07:13:10 2010
@@ -24,20 +24,21 @@ import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.io.Serializable;
-import java.util.Map;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.io.RawComparator;
/**
* <p>
* An experimental {@link Serialization} for Java {@link Serializable} classes.
* </p>
* @see JavaSerializationComparator
+ * @deprecated Use {@link org.apache.hadoop.io.serial.lib.JavaSerialization}
+ * instead.
*/
@InterfaceAudience.Public
@InterfaceStability.Unstable
+@Deprecated
public class JavaSerialization implements Serialization<Serializable> {
static class JavaSerializationDeserializer<T extends Serializable>