Posted to commits@hbase.apache.org by st...@apache.org on 2009/11/06 20:11:22 UTC

svn commit: r833528 - in /hadoop/hbase/branches/0.20: CHANGES.txt src/java/org/apache/hadoop/hbase/client/Get.java

Author: stack
Date: Fri Nov  6 19:11:21 2009
New Revision: 833528

URL: http://svn.apache.org/viewvc?rev=833528&view=rev
Log:
HBASE-1957 Get-s can't set a Filter

Modified:
    hadoop/hbase/branches/0.20/CHANGES.txt
    hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/Get.java

Modified: hadoop/hbase/branches/0.20/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/CHANGES.txt?rev=833528&r1=833527&r2=833528&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/CHANGES.txt (original)
+++ hadoop/hbase/branches/0.20/CHANGES.txt Fri Nov  6 19:11:21 2009
@@ -35,6 +35,7 @@
                looks like recursive loop
    HBASE-1949  KeyValue expiration by Time-to-Live during major compaction is
                broken (Gary Helmling via Stack)
+   HBASE-1957  Get-s can't set a Filter (Roman Kalyakin via Stack)
 
   IMPROVEMENTS
    HBASE-1899  Use scanner caching in shell count
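
For context: HBASE-1957 reports that a Filter attached to a Get did not
work over RPC; the Get.java patch below changes how the filter is
serialized and deserialized. A minimal client-side sketch of the usage
this fixes (class names are from the 0.20 client API; the table, row,
and value are hypothetical):

    HTable table = new HTable(new HBaseConfiguration(), "mytable");
    Get get = new Get(Bytes.toBytes("row1"));
    // Attach a server-side filter; it travels inside the Get over RPC.
    get.setFilter(new ValueFilter(CompareFilter.CompareOp.EQUAL,
        new BinaryComparator(Bytes.toBytes("value1"))));
    Result result = table.get(get);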

Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/Get.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/Get.java?rev=833528&r1=833527&r2=833528&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/Get.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/Get.java Fri Nov  6 19:11:21 2009
@@ -28,12 +28,14 @@
 import java.util.TreeMap;
 import java.util.TreeSet;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.io.HbaseObjectWritable;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableFactories;
 
 /**
  * Used to perform Get operations on a single row.
@@ -355,7 +357,8 @@
     this.maxVersions = in.readInt();
     boolean hasFilter = in.readBoolean();
     if (hasFilter) {
-      this.filter = (Filter)HbaseObjectWritable.readObject(in, null);
+      this.filter = (Filter)createForName(Bytes.toString(Bytes.readByteArray(in)));
+      this.filter.readFields(in);
     }
     this.tr = new TimeRange();
     tr.readFields(in);
@@ -387,7 +390,8 @@
       out.writeBoolean(false);
     } else {
       out.writeBoolean(true);
-      HbaseObjectWritable.writeObject(out, this.filter, Filter.class, null);
+      Bytes.writeByteArray(out, Bytes.toBytes(filter.getClass().getName()));
+      filter.write(out);
     }
     tr.write(out);
     out.writeInt(familyMap.size());
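
Taken together, the readFields and write hunks replace the
HbaseObjectWritable encoding of the filter with an explicit one: a
boolean presence flag, then the filter's class name as a length-prefixed
byte array, then whatever the filter itself writes. A standalone
round-trip sketch of that encoding, mirroring the patch (PrefixFilter is
one of the stock 0.20 filters; the in-memory streams and a surrounding
method declared to throw Exception are assumed):

    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bos);
    Filter filter = new PrefixFilter(Bytes.toBytes("row"));
    // Write side, as in Get.write(): class name first, then the
    // filter serializes its own fields.
    Bytes.writeByteArray(out, Bytes.toBytes(filter.getClass().getName()));
    filter.write(out);
    out.flush();

    DataInputStream in = new DataInputStream(
        new ByteArrayInputStream(bos.toByteArray()));
    // Read side, as in Get.readFields(): instantiate by class name,
    // then let the new instance read its own fields back.
    String className = Bytes.toString(Bytes.readByteArray(in));
    Filter copy = (Filter) WritableFactories.newInstance(
        Class.forName(className).asSubclass(Writable.class),
        new Configuration());
    copy.readFields(in);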
@@ -406,4 +410,15 @@
       }
     }
   }
+
+  @SuppressWarnings("unchecked")
+  private Writable createForName(String className) {
+    try {
+      Class<? extends Writable> clazz =
+        (Class<? extends Writable>) Class.forName(className);
+      return WritableFactories.newInstance(clazz, new Configuration());
+    } catch (ClassNotFoundException e) {
+      throw new RuntimeException("Can't find class " + className);
+    }    
+  }
 }
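
One note on createForName: WritableFactories.newInstance falls back to
reflection when no factory is registered for the class, so a filter
shipped through this path needs a no-argument constructor and has to be
on the region server's classpath. A quick standalone check that mirrors
the read path (PrefixFilter stands in for any filter class):

    // If this throws, Get.readFields() will not be able to
    // reconstruct the filter on the server side either.
    Writable instance =
      WritableFactories.newInstance(PrefixFilter.class, new Configuration());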