Posted to commits@cassandra.apache.org by ee...@apache.org on 2011/01/16 05:06:09 UTC

svn commit: r1059460 - in /cassandra/trunk: ./ interface/ src/java/org/apache/cassandra/hadoop/ src/java/org/apache/cassandra/hadoop/streaming/

Author: eevans
Date: Sun Jan 16 04:06:08 2011
New Revision: 1059460

URL: http://svn.apache.org/viewvc?rev=1059460&view=rev
Log:
move remaining avro records to hadoop package

Patch by eevans; reviewed by Jeremy Hanna for CASSANDRA-926

Added:
    cassandra/trunk/src/java/org/apache/cassandra/hadoop/hadoop.genavro
      - copied, changed from r1059458, cassandra/trunk/interface/cassandra.genavro
Removed:
    cassandra/trunk/interface/cassandra.genavro
Modified:
    cassandra/trunk/build.xml
    cassandra/trunk/src/java/org/apache/cassandra/hadoop/ColumnFamilyOutputFormat.java
    cassandra/trunk/src/java/org/apache/cassandra/hadoop/ColumnFamilyRecordWriter.java
    cassandra/trunk/src/java/org/apache/cassandra/hadoop/streaming/AvroOutputReader.java

Modified: cassandra/trunk/build.xml
URL: http://svn.apache.org/viewvc/cassandra/trunk/build.xml?rev=1059460&r1=1059459&r2=1059460&view=diff
==============================================================================
--- cassandra/trunk/build.xml (original)
+++ cassandra/trunk/build.xml Sun Jan 16 04:06:08 2011
@@ -38,7 +38,6 @@
     <property name="interface.dir" value="${basedir}/interface"/>
     <property name="interface.thrift.dir" value="${interface.dir}/thrift"/>
     <property name="interface.thrift.gen-java" value="${interface.thrift.dir}/gen-java"/>
-    <property name="interface.avro.dir" value="${interface.dir}/avro"/>
     <property name="test.dir" value="${basedir}/test"/>
     <property name="test.resources" value="${test.dir}/resources"/>
     <property name="test.lib" value="${build.dir}/test/lib"/>
@@ -119,7 +118,6 @@
         <delete dir="${build.test.dir}" />
         <delete dir="${build.classes}" />
         <delete dir="${build.src.gen-java}" />
-        <delete dir="${interface.avro.dir}" />
         <delete file="${build.dir}/internode.avpr" />
     </target>
     <target depends="clean" name="cleanall"/>
@@ -210,21 +208,23 @@
       <taskdef name="avro-protocol" classname="org.apache.avro.specific.ProtocolTask">
         <classpath refid="cassandra.classpath" />
       </taskdef>
-      <uptodate property="avroInterfaceUpToDate" srcfile="${interface.dir}/cassandra.genavro"
-                targetfile="${interface.avro.dir}/cassandra.avpr" />
+      <uptodate property="avroInterfaceUpToDate"
+                srcfile="${build.src.java}/org/apache/cassandra/hadoop/hadoop.genavro"
+                targetfile="${build.src.gen-java}/org/apache/cassandra/hadoop/hadoop.avpr" />
       <uptodate property="avroINProtoUpToDate" srcfile="${avro.src}/internode.genavro"
                 targetfile="${build.dir}/internode.avpr" />
     </target>
 
     <target name="avro-generate"
-            depends="avro-interface-generate-client,avro-interface-generate-internode"
+            depends="avro-interface-generate-hadoop,avro-interface-generate-internode"
             description="Generates Java Avro classes for client and internal use." />
 
-    <target name="avro-interface-generate-client" unless="avroInterfaceUpToDate"
+    <target name="avro-interface-generate-hadoop" unless="avroInterfaceUpToDate"
             depends="init,check-avro-generate,ivy-retrieve-build">
-      <!-- A copy for our build. -->
-      <avromacro protocolname="api" inputfile="${interface.dir}/cassandra.genavro"
-                 jsondir="${build.dir}" outputdir="${build.src}" />
+      <avromacro protocolname="hadoop"
+                 inputfile="${build.src.java}/org/apache/cassandra/hadoop/hadoop.genavro"
+                 jsondir="${build.src.gen-java}/org/apache/cassandra/hadoop"
+                 outputdir="${build.src}" />
     </target>
     <target name="avro-interface-generate-internode" unless="avroINProtoUpToDate"
             depends="init,check-avro-generate,ivy-retrieve-build">
@@ -362,8 +362,6 @@
       <copy todir="${dist.dir}/interface">
         <fileset dir="interface">
           <include name="**/*.thrift" />
-          <include name="cassandra.genavro" />
-          <include name="avro/cassandra.avpr" />
         </fileset>
       </copy>
       <copy todir="${dist.dir}/">
@@ -401,7 +399,6 @@
           <include name="**"/>
           <exclude name="build/**" />
           <exclude name="src/gen-java/**" />
-          <exclude name="interface/avro/**" />
           <exclude name=".git/**" />
           <exclude name="bin/*" /> <!-- handled separately below -->
           <!-- exclude Eclipse files -->
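
The practical effect of the build.xml changes is that the Avro IDL now lives beside the Hadoop sources (src/java/org/apache/cassandra/hadoop/hadoop.genavro), the generation target is renamed to avro-interface-generate-hadoop, and the records it produces land in the org.apache.cassandra.hadoop.avro package. As a minimal sketch (only the Mutation class and its new package come from this commit; the class below is illustrative), code compiled after running the existing avro-generate target can confirm the relocation:

    // Illustrative sketch only: prints the package the generated Avro records
    // now live in after "ant avro-generate" on this revision of trunk.
    import org.apache.cassandra.hadoop.avro.Mutation;

    public class WhereDidAvroGo
    {
        public static void main(String[] args)
        {
            // expected output: org.apache.cassandra.hadoop.avro
            System.out.println(Mutation.class.getPackage().getName());
        }
    }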

Modified: cassandra/trunk/src/java/org/apache/cassandra/hadoop/ColumnFamilyOutputFormat.java
URL: http://svn.apache.org/viewvc/cassandra/trunk/src/java/org/apache/cassandra/hadoop/ColumnFamilyOutputFormat.java?rev=1059460&r1=1059459&r2=1059460&view=diff
==============================================================================
--- cassandra/trunk/src/java/org/apache/cassandra/hadoop/ColumnFamilyOutputFormat.java (original)
+++ cassandra/trunk/src/java/org/apache/cassandra/hadoop/ColumnFamilyOutputFormat.java Sun Jan 16 04:06:08 2011
@@ -31,7 +31,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import org.apache.cassandra.auth.SimpleAuthenticator;
-import org.apache.cassandra.avro.Mutation;
+import org.apache.cassandra.hadoop.avro.Mutation;
 import org.apache.cassandra.thrift.*;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.*;
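
For MapReduce jobs that write through ColumnFamilyOutputFormat, the only source change this commit implies is the package of the Mutation value type. A hypothetical reducer skeleton, hedged accordingly (class and key types other than Mutation are illustrative, not part of this commit), would now look like:

    // Hypothetical reducer skeleton; the only detail taken from this commit is
    // that the value type is org.apache.cassandra.hadoop.avro.Mutation.
    import java.nio.ByteBuffer;
    import java.util.List;

    import org.apache.cassandra.hadoop.avro.Mutation;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Reducer;

    public class ExampleMutationReducer extends Reducer<Text, Text, ByteBuffer, List<Mutation>>
    {
        // reduce() omitted: it would emit (row key, mutations) pairs, which
        // ColumnFamilyOutputFormat hands to ColumnFamilyRecordWriter below.
    }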

Modified: cassandra/trunk/src/java/org/apache/cassandra/hadoop/ColumnFamilyRecordWriter.java
URL: http://svn.apache.org/viewvc/cassandra/trunk/src/java/org/apache/cassandra/hadoop/ColumnFamilyRecordWriter.java?rev=1059460&r1=1059459&r2=1059460&view=diff
==============================================================================
--- cassandra/trunk/src/java/org/apache/cassandra/hadoop/ColumnFamilyRecordWriter.java (original)
+++ cassandra/trunk/src/java/org/apache/cassandra/hadoop/ColumnFamilyRecordWriter.java Sun Jan 16 04:06:08 2011
@@ -59,8 +59,8 @@ import static org.apache.cassandra.io.Se
  * @see OutputFormat
  * 
  */
-final class ColumnFamilyRecordWriter extends RecordWriter<ByteBuffer,List<org.apache.cassandra.avro.Mutation>>
-implements org.apache.hadoop.mapred.RecordWriter<ByteBuffer,List<org.apache.cassandra.avro.Mutation>>
+final class ColumnFamilyRecordWriter extends RecordWriter<ByteBuffer,List<org.apache.cassandra.hadoop.avro.Mutation>>
+implements org.apache.hadoop.mapred.RecordWriter<ByteBuffer,List<org.apache.cassandra.hadoop.avro.Mutation>>
 {
     // The configuration this writer is associated with.
     private final Configuration conf;
@@ -118,7 +118,7 @@ implements org.apache.hadoop.mapred.Reco
      * @throws IOException
      */
     @Override
-    public void write(ByteBuffer keybuff, List<org.apache.cassandra.avro.Mutation> value) throws IOException
+    public void write(ByteBuffer keybuff, List<org.apache.cassandra.hadoop.avro.Mutation> value) throws IOException
     {
         Range range = ringCache.getRange(keybuff);
 
@@ -132,17 +132,17 @@ implements org.apache.hadoop.mapred.Reco
             clients.put(range, client);
         }
 
-        for (org.apache.cassandra.avro.Mutation amut : value)
+        for (org.apache.cassandra.hadoop.avro.Mutation amut : value)
             client.put(new Pair<ByteBuffer,Mutation>(keybuff, avroToThrift(amut)));
     }
 
     /**
      * Deep copies the given Avro mutation into a new Thrift mutation.
      */
-    private Mutation avroToThrift(org.apache.cassandra.avro.Mutation amut)
+    private Mutation avroToThrift(org.apache.cassandra.hadoop.avro.Mutation amut)
     {
         Mutation mutation = new Mutation();
-        org.apache.cassandra.avro.ColumnOrSuperColumn acosc = amut.column_or_supercolumn;
+        org.apache.cassandra.hadoop.avro.ColumnOrSuperColumn acosc = amut.column_or_supercolumn;
         if (acosc != null)
         {
             // creation
@@ -156,7 +156,7 @@ implements org.apache.hadoop.mapred.Reco
                 // super column
                 ByteBuffer scolname = acosc.super_column.name;
                 List<Column> scolcols = new ArrayList<Column>(acosc.super_column.columns.size());
-                for (org.apache.cassandra.avro.Column acol : acosc.super_column.columns)
+                for (org.apache.cassandra.hadoop.avro.Column acol : acosc.super_column.columns)
                     scolcols.add(avroToThrift(acol));
                 cosc.setSuper_column(new SuperColumn(scolname, scolcols));
             }
@@ -166,7 +166,7 @@ implements org.apache.hadoop.mapred.Reco
             // deletion
             Deletion deletion = new Deletion(amut.deletion.timestamp);
             mutation.setDeletion(deletion);
-            org.apache.cassandra.avro.SlicePredicate apred = amut.deletion.predicate;
+            org.apache.cassandra.hadoop.avro.SlicePredicate apred = amut.deletion.predicate;
             if (amut.deletion.super_column != null)
                 // super column
                 deletion.setSuper_column(copy(amut.deletion.super_column));
@@ -187,12 +187,12 @@ implements org.apache.hadoop.mapred.Reco
         return mutation;
     }
 
-    private SliceRange avroToThrift(org.apache.cassandra.avro.SliceRange asr)
+    private SliceRange avroToThrift(org.apache.cassandra.hadoop.avro.SliceRange asr)
     {
         return new SliceRange(asr.start, asr.finish, asr.reversed, asr.count);
     }
 
-    private Column avroToThrift(org.apache.cassandra.avro.Column acol)
+    private Column avroToThrift(org.apache.cassandra.hadoop.avro.Column acol)
     {
         return new Column(acol.name, acol.value, acol.timestamp);
     }
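
ColumnFamilyRecordWriter still deep-copies each Avro mutation into its Thrift equivalent; only the Avro-side package changes. A rough sketch of building the insertion case that avroToThrift() handles: the field names column_or_supercolumn, name, value and timestamp are the ones the diff reads, while the ColumnOrSuperColumn.column field is assumed here by analogy with the super_column field that the diff does reference.

    // Sketch of an Avro-side insertion Mutation under the relocated namespace.
    import java.nio.ByteBuffer;

    import org.apache.cassandra.hadoop.avro.Column;
    import org.apache.cassandra.hadoop.avro.ColumnOrSuperColumn;
    import org.apache.cassandra.hadoop.avro.Mutation;

    public class InsertionMutationExample
    {
        public static Mutation insertion(ByteBuffer name, ByteBuffer value, long timestamp)
        {
            Column column = new Column();
            column.name = name;
            column.value = value;
            column.timestamp = timestamp;

            ColumnOrSuperColumn cosc = new ColumnOrSuperColumn();
            cosc.column = column; // assumed field name, see note above

            Mutation mutation = new Mutation();
            mutation.column_or_supercolumn = cosc;
            return mutation;
        }
    }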

Copied: cassandra/trunk/src/java/org/apache/cassandra/hadoop/hadoop.genavro (from r1059458, cassandra/trunk/interface/cassandra.genavro)
URL: http://svn.apache.org/viewvc/cassandra/trunk/src/java/org/apache/cassandra/hadoop/hadoop.genavro?p2=cassandra/trunk/src/java/org/apache/cassandra/hadoop/hadoop.genavro&p1=cassandra/trunk/interface/cassandra.genavro&r1=1059458&r2=1059460&rev=1059460&view=diff
==============================================================================
--- cassandra/trunk/interface/cassandra.genavro (original)
+++ cassandra/trunk/src/java/org/apache/cassandra/hadoop/hadoop.genavro Sun Jan 16 04:06:08 2011
@@ -19,7 +19,7 @@
 /**
  * Cassandra client interface (legacy).
  */
-@namespace("org.apache.cassandra.avro")
+@namespace("org.apache.cassandra.hadoop.avro")
 
 protocol Cassandra {
     record Column {

Modified: cassandra/trunk/src/java/org/apache/cassandra/hadoop/streaming/AvroOutputReader.java
URL: http://svn.apache.org/viewvc/cassandra/trunk/src/java/org/apache/cassandra/hadoop/streaming/AvroOutputReader.java?rev=1059460&r1=1059459&r2=1059460&view=diff
==============================================================================
--- cassandra/trunk/src/java/org/apache/cassandra/hadoop/streaming/AvroOutputReader.java (original)
+++ cassandra/trunk/src/java/org/apache/cassandra/hadoop/streaming/AvroOutputReader.java Sun Jan 16 04:06:08 2011
@@ -30,8 +30,8 @@ import java.util.List;
 import org.apache.avro.io.BinaryDecoder;
 import org.apache.avro.io.DecoderFactory;
 import org.apache.avro.specific.SpecificDatumReader;
-import org.apache.cassandra.avro.Mutation;
-import org.apache.cassandra.avro.StreamingMutation;
+import org.apache.cassandra.hadoop.avro.Mutation;
+import org.apache.cassandra.hadoop.avro.StreamingMutation;
 import org.apache.hadoop.streaming.PipeMapRed;
 import org.apache.hadoop.streaming.io.OutputReader;
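
AvroOutputReader decodes Avro-encoded StreamingMutation records from the streaming job's output using the SpecificDatumReader/BinaryDecoder imports shown above. The following is a rough sketch of that plumbing, not the class's actual implementation; DecoderFactory.defaultFactory() matches the 1.4-era Avro API in use at the time, while newer Avro spells the same thing DecoderFactory.get().binaryDecoder(in, null).

    // Sketch: decode one StreamingMutation (new namespace) from an input stream.
    import java.io.IOException;
    import java.io.InputStream;

    import org.apache.avro.io.BinaryDecoder;
    import org.apache.avro.io.DecoderFactory;
    import org.apache.avro.specific.SpecificDatumReader;
    import org.apache.cassandra.hadoop.avro.StreamingMutation;

    public class StreamingMutationDecoder
    {
        private final SpecificDatumReader<StreamingMutation> reader =
                new SpecificDatumReader<StreamingMutation>(StreamingMutation.class);

        public StreamingMutation readOne(InputStream in) throws IOException
        {
            BinaryDecoder decoder = DecoderFactory.defaultFactory().createBinaryDecoder(in, null);
            return reader.read(null, decoder);
        }
    }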