You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by st...@apache.org on 2011/08/26 21:03:40 UTC
svn commit: r1162210 - in /hbase/trunk: CHANGES.txt
src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
src/main/ruby/shell.rb
Author: stack
Date: Fri Aug 26 19:03:40 2011
New Revision: 1162210
URL: http://svn.apache.org/viewvc?rev=1162210&view=rev
Log:
hbase-4248 Enhancements for Filter Language exposing HBase filters through the Thrift API
Modified:
hbase/trunk/CHANGES.txt
hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
hbase/trunk/src/main/ruby/shell.rb
Modified: hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hbase/trunk/CHANGES.txt?rev=1162210&r1=1162209&r2=1162210&view=diff
==============================================================================
--- hbase/trunk/CHANGES.txt (original)
+++ hbase/trunk/CHANGES.txt Fri Aug 26 19:03:40 2011
@@ -408,6 +408,8 @@ Release 0.91.0 - Unreleased
HBASE-4240 Allow Loadbalancer to be pluggable
HBASE-4244 Refactor bin/hbase help
HBASE-4241 Optimize flushing of the Memstore (Lars Hofhansl)
+ HBASE-4248 Enhancements for Filter Language exposing HBase filters through
+ the Thrift API (Anirudh Todi)
TASKS
HBASE-3559 Move report of split to master OFF the heartbeat channel
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java?rev=1162210&r1=1162209&r2=1162210&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java Fri Aug 26 19:03:40 2011
@@ -26,6 +26,7 @@ import java.util.TreeSet;
import java.util.ArrayList;
import java.util.Stack;
import java.util.HashMap;
+import java.util.Set;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Writables;
@@ -50,14 +51,50 @@ import java.util.EmptyStackException;
*/
public class ParseFilter {
- private HashMap<ByteBuffer, Integer> operatorPrecedenceHashMap;
+ private static HashMap<ByteBuffer, Integer> operatorPrecedenceHashMap;
+ private static HashMap<String, String> filterHashMap;
- /**
- * Constructor
- * <p>
- * Creates the operatorPrecedenceHashMap
- */
- public ParseFilter() {
+ static {
+ // Registers all the filters supported by the Filter Language
+ filterHashMap = new HashMap<String, String>();
+ filterHashMap.put("KeyOnlyFilter", ParseConstants.FILTER_PACKAGE + "." +
+ "KeyOnlyFilter");
+ filterHashMap.put("FirstKeyOnlyFilter", ParseConstants.FILTER_PACKAGE + "." +
+ "FirstKeyOnlyFilter");
+ filterHashMap.put("PrefixFilter", ParseConstants.FILTER_PACKAGE + "." +
+ "PrefixFilter");
+ filterHashMap.put("ColumnPrefixFilter", ParseConstants.FILTER_PACKAGE + "." +
+ "ColumnPrefixFilter");
+ filterHashMap.put("MultipleColumnPrefixFilter", ParseConstants.FILTER_PACKAGE + "." +
+ "MultipleColumnPrefixFilter");
+ filterHashMap.put("ColumnCountGetFilter", ParseConstants.FILTER_PACKAGE + "." +
+ "ColumnCountGetFilter");
+ filterHashMap.put("PageFilter", ParseConstants.FILTER_PACKAGE + "." +
+ "PageFilter");
+ filterHashMap.put("ColumnPaginationFilter", ParseConstants.FILTER_PACKAGE + "." +
+ "ColumnPaginationFilter");
+ filterHashMap.put("InclusiveStopFilter", ParseConstants.FILTER_PACKAGE + "." +
+ "InclusiveStopFilter");
+ filterHashMap.put("TimestampsFilter", ParseConstants.FILTER_PACKAGE + "." +
+ "TimestampsFilter");
+ filterHashMap.put("RowFilter", ParseConstants.FILTER_PACKAGE + "." +
+ "RowFilter");
+ filterHashMap.put("FamilyFilter", ParseConstants.FILTER_PACKAGE + "." +
+ "FamilyFilter");
+ filterHashMap.put("QualifierFilter", ParseConstants.FILTER_PACKAGE + "." +
+ "QualifierFilter");
+ filterHashMap.put("ValueFilter", ParseConstants.FILTER_PACKAGE + "." +
+ "ValueFilter");
+ filterHashMap.put("ColumnRangeFilter", ParseConstants.FILTER_PACKAGE + "." +
+ "ColumnRangeFilter");
+ filterHashMap.put("SingleColumnValueFilter", ParseConstants.FILTER_PACKAGE + "." +
+ "SingleColumnValueFilter");
+ filterHashMap.put("SingleColumnValueExcludeFilter", ParseConstants.FILTER_PACKAGE + "." +
+ "SingleColumnValueExcludeFilter");
+ filterHashMap.put("DependentColumnFilter", ParseConstants.FILTER_PACKAGE + "." +
+ "DependentColumnFilter");
+
+ // Creates the operatorPrecedenceHashMap
operatorPrecedenceHashMap = new HashMap<ByteBuffer, Integer>();
operatorPrecedenceHashMap.put(ParseConstants.SKIP_BUFFER, 1);
operatorPrecedenceHashMap.put(ParseConstants.WHILE_BUFFER, 1);
@@ -199,8 +236,11 @@ public class ParseFilter {
String filterName = Bytes.toString(getFilterName(filterStringAsByteArray));
ArrayList<byte []> filterArguments = getFilterArguments(filterStringAsByteArray);
+ if (!filterHashMap.containsKey(filterName)) {
+ throw new IllegalArgumentException("Filter Name " + filterName + " not supported");
+ }
try {
- filterName = ParseConstants.FILTER_PACKAGE + "." + filterName;
+ filterName = filterHashMap.get(filterName);
Class c = Class.forName(filterName);
Class[] argTypes = new Class [] {ArrayList.class};
Method m = c.getDeclaredMethod("createFilterFromArguments", argTypes);
@@ -793,4 +833,11 @@ public class ParseFilter {
return result;
}
+
+/**
+ * Return a Set of filters supported by the Filter Language
+ */
+ public Set<String> getSupportedFilters () {
+ return filterHashMap.keySet();
+ }
}
Modified: hbase/trunk/src/main/ruby/shell.rb
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/ruby/shell.rb?rev=1162210&r1=1162209&r2=1162210&view=diff
==============================================================================
--- hbase/trunk/src/main/ruby/shell.rb (original)
+++ hbase/trunk/src/main/ruby/shell.rb Fri Aug 26 19:03:40 2011
@@ -227,6 +227,7 @@ Shell.load_command_group(
is_enabled
exists
list
+ show_filters
]
)