You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by st...@apache.org on 2010/01/05 18:26:53 UTC
svn commit: r896138 [1/9] - in /hadoop/hbase/branches/0.20: ./ src/contrib/
src/contrib/indexed/ src/contrib/indexed/lib/
src/contrib/indexed/lib/fmpp-0.19.14/ src/contrib/indexed/src/
src/contrib/indexed/src/fmpp/ src/contrib/indexed/src/fmpp/src/ src...
Author: stack
Date: Tue Jan 5 17:26:49 2010
New Revision: 896138
URL: http://svn.apache.org/viewvc?rev=896138&view=rev
Log:
HBASE-2037 Alternate indexed hbase implementation; speeds scans by adding indexes to regions rather than secondary tables
Added:
hadoop/hbase/branches/0.20/src/contrib/indexed/
hadoop/hbase/branches/0.20/src/contrib/indexed/README.txt
hadoop/hbase/branches/0.20/src/contrib/indexed/build-fmpp.xml
hadoop/hbase/branches/0.20/src/contrib/indexed/build.xml
hadoop/hbase/branches/0.20/src/contrib/indexed/lib/
hadoop/hbase/branches/0.20/src/contrib/indexed/lib/easymock-2.5.2.LICENSE.txt
hadoop/hbase/branches/0.20/src/contrib/indexed/lib/easymock-2.5.2.jar (with props)
hadoop/hbase/branches/0.20/src/contrib/indexed/lib/fmpp-0.19.14/
hadoop/hbase/branches/0.20/src/contrib/indexed/lib/fmpp-0.19.14/bsh.jar (with props)
hadoop/hbase/branches/0.20/src/contrib/indexed/lib/fmpp-0.19.14/fmpp.jar (with props)
hadoop/hbase/branches/0.20/src/contrib/indexed/lib/fmpp-0.19.14/freemarker.jar (with props)
hadoop/hbase/branches/0.20/src/contrib/indexed/lib/fmpp-0.19.14/oro.jar (with props)
hadoop/hbase/branches/0.20/src/contrib/indexed/lib/fmpp-0.19.14/resolver.jar (with props)
hadoop/hbase/branches/0.20/src/contrib/indexed/src/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/data/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/data/types.csv
hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/java/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/java/org/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/java/org/apache/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/java/org/apache/hadoop/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/java/org/apache/hadoop/hbase/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/java/org/apache/hadoop/hbase/regionserver/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/java/org/apache/hadoop/hbase/regionserver/idx/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/java/org/apache/hadoop/hbase/regionserver/idx/support/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/ArrayList.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/test/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/test/org/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/test/org/apache/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/test/org/apache/hadoop/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/test/org/apache/hadoop/hbase/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/test/org/apache/hadoop/hbase/regionserver/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/test/org/apache/hadoop/hbase/regionserver/idx/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/test/org/apache/hadoop/hbase/regionserver/idx/support/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/TestArrayList.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/JmxHelper.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/WritableHelper.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/IdxColumnDescriptor.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/IdxIndexDescriptor.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/IdxQualifierType.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/IdxScan.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/exp/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/exp/And.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/exp/Comparison.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/exp/Compound.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/exp/Expression.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/exp/Or.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/package.html
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/CompleteIndex.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/CompleteIndexBuilder.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/EmptyIndex.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/IdxExpressionEvaluator.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/IdxIndex.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/IdxRegion.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/IdxRegionIndexManager.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/IdxRegionMBean.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/IdxRegionMBeanImpl.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/IdxSearchContext.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/Bits.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/Callback.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/IdxClassSize.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/BigDecimalArrayList.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/BinarySearch.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/ByteArrayArrayList.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/ByteArrayList.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/CharArrayArrayList.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/CharArrayList.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/DoubleArrayList.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/FloatArrayList.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/IntegerArrayList.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/List.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/LongArrayList.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/ObjectArrayList.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/ShortArrayList.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/sets/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/sets/BitSet.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/sets/IntSet.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/sets/IntSetBase.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/sets/IntSetBuilder.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/sets/SparseBitSet.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/TestIdxHBaseCluster.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/TestIdxMasterAdmin.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/TestWritableHelper.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/client/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/client/idx/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/client/idx/TestIdxColumnDescriptor.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/client/idx/TestIdxIndexDescriptor.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/client/idx/TestIdxScan.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/client/idx/exp/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/client/idx/exp/TestComparison.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/client/idx/exp/TestExpression.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/TestCompleteIndex.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/TestHRegionWithIdxRegion.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/TestHRegionWithIdxRegionNoIndexes.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/TestIdxExpressionEvaluator.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/TestIdxRegion.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/TestIdxRegionIndexManager.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/TestIdxRegionPerformance.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/TestBits.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/TestIdxClassSize.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/TestBigDecimalArrayList.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/TestByteArrayArrayList.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/TestByteArrayList.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/TestCharArrayArrayList.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/TestCharArrayList.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/TestDoubleArrayList.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/TestFloatArrayList.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/TestIntegerArrayList.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/TestLongArrayList.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/TestShortArrayList.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/sets/
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/sets/IntSetBaseTestCase.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/sets/TestBitSet.java
hadoop/hbase/branches/0.20/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/sets/TestSparseBitSet.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/MemStoreScanner.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/StoreFlusher.java
hadoop/hbase/branches/0.20/src/test/org/apache/hadoop/hbase/util/TestPair.java
Modified:
hadoop/hbase/branches/0.20/CHANGES.txt
hadoop/hbase/branches/0.20/src/contrib/build-contrib.xml
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/HConstants.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/HMerge.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/Scan.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/filter/FilterList.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/KeyValueSkipListSet.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/MemStore.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/ScanDeleteTracker.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/Store.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/util/Bytes.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/util/Pair.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/util/Writables.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWrapper.java
hadoop/hbase/branches/0.20/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java
hadoop/hbase/branches/0.20/src/test/org/apache/hadoop/hbase/regionserver/TestStore.java
hadoop/hbase/branches/0.20/src/test/org/apache/hadoop/hbase/util/TestBytes.java
Modified: hadoop/hbase/branches/0.20/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/CHANGES.txt?rev=896138&r1=896137&r2=896138&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/CHANGES.txt (original)
+++ hadoop/hbase/branches/0.20/CHANGES.txt Tue Jan 5 17:26:49 2010
@@ -40,7 +40,6 @@
(Lars George and Gary Helmling via Stack)
IMPROVEMENTS
- HBASE-1961 HBase EC2 scripts
HBASE-1970 Export does one version only; make it configurable how many
it does
HBASE-1975 SingleColumnValueFilter: Add ability to match the value of
@@ -87,6 +86,11 @@
HBASE-2021 Add compaction details to master UI
(Lars George via Stack)
+ NEW FEATURES
+ HBASE-1961 HBase EC2 scripts
+ HBASE-2037 Alternate indexed hbase implementation; speeds scans by adding
+ indexes to regions rather than secondary tables
+
Release 0.20.2 - November 18th, 2009
INCOMPATIBLE CHANGES
Modified: hadoop/hbase/branches/0.20/src/contrib/build-contrib.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/contrib/build-contrib.xml?rev=896138&r1=896137&r2=896138&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/contrib/build-contrib.xml (original)
+++ hadoop/hbase/branches/0.20/src/contrib/build-contrib.xml Tue Jan 5 17:26:49 2010
@@ -257,6 +257,7 @@
errorProperty="tests.failed" failureProperty="tests.failed"
timeout="${test.timeout}">
+ <jvmarg value="-ea"/>
<sysproperty key="test.build.data" value="${build.test}/data"/>
<sysproperty key="build.test" value="${build.test}"/>
<sysproperty key="contrib.name" value="${name}"/>
Added: hadoop/hbase/branches/0.20/src/contrib/indexed/README.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/contrib/indexed/README.txt?rev=896138&view=auto
==============================================================================
--- hadoop/hbase/branches/0.20/src/contrib/indexed/README.txt (added)
+++ hadoop/hbase/branches/0.20/src/contrib/indexed/README.txt Tue Jan 5 17:26:49 2010
@@ -0,0 +1,3 @@
+This contrib contains indexed hbase (IHBase).
+To use it, include hbase-X.X.X-indexed.jar in your CLASSPATH and follow
+the instructions in the javadoc under the respective packages: org.apache.hadoop.hbase.$foobar$.indexed.
Added: hadoop/hbase/branches/0.20/src/contrib/indexed/build-fmpp.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/contrib/indexed/build-fmpp.xml?rev=896138&view=auto
==============================================================================
--- hadoop/hbase/branches/0.20/src/contrib/indexed/build-fmpp.xml (added)
+++ hadoop/hbase/branches/0.20/src/contrib/indexed/build-fmpp.xml Tue Jan 5 17:26:49 2010
@@ -0,0 +1,34 @@
+<?xml version="1.0"?>
+
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!--
+ Used to generate code from fmpp templates. To use
+ # ant -f build-fmpp.xml -lib lib/fmpp-0.19.14
+-->
+<project name="indexed" default="generate">
+
+ <taskdef name="fmpp" classname="fmpp.tools.AntTask" />
+
+ <target name="generate">
+ <fmpp
+ sourceRoot="src/fmpp/src" outputRoot="src"
+ data="types:csv(data/types.csv)"
+ />
+ </target>
+</project>
Added: hadoop/hbase/branches/0.20/src/contrib/indexed/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/contrib/indexed/build.xml?rev=896138&view=auto
==============================================================================
--- hadoop/hbase/branches/0.20/src/contrib/indexed/build.xml (added)
+++ hadoop/hbase/branches/0.20/src/contrib/indexed/build.xml Tue Jan 5 17:26:49 2010
@@ -0,0 +1,27 @@
+<?xml version="1.0"?>
+
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!--
+Before you can run these subtargets directly, you need
+to call at top-level: ant deploy-contrib compile-core-test
+-->
+<project name="indexed" default="jar">
+ <import file="../build-contrib.xml"/>
+
+</project>
Added: hadoop/hbase/branches/0.20/src/contrib/indexed/lib/easymock-2.5.2.LICENSE.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/contrib/indexed/lib/easymock-2.5.2.LICENSE.txt?rev=896138&view=auto
==============================================================================
--- hadoop/hbase/branches/0.20/src/contrib/indexed/lib/easymock-2.5.2.LICENSE.txt (added)
+++ hadoop/hbase/branches/0.20/src/contrib/indexed/lib/easymock-2.5.2.LICENSE.txt Tue Jan 5 17:26:49 2010
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
Added: hadoop/hbase/branches/0.20/src/contrib/indexed/lib/easymock-2.5.2.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/contrib/indexed/lib/easymock-2.5.2.jar?rev=896138&view=auto
==============================================================================
Binary file - no diff available.
Propchange: hadoop/hbase/branches/0.20/src/contrib/indexed/lib/easymock-2.5.2.jar
------------------------------------------------------------------------------
svn:mime-type = application/octet-stream
Added: hadoop/hbase/branches/0.20/src/contrib/indexed/lib/fmpp-0.19.14/bsh.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/contrib/indexed/lib/fmpp-0.19.14/bsh.jar?rev=896138&view=auto
==============================================================================
Binary file - no diff available.
Propchange: hadoop/hbase/branches/0.20/src/contrib/indexed/lib/fmpp-0.19.14/bsh.jar
------------------------------------------------------------------------------
svn:mime-type = application/octet-stream
Added: hadoop/hbase/branches/0.20/src/contrib/indexed/lib/fmpp-0.19.14/fmpp.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/contrib/indexed/lib/fmpp-0.19.14/fmpp.jar?rev=896138&view=auto
==============================================================================
Binary file - no diff available.
Propchange: hadoop/hbase/branches/0.20/src/contrib/indexed/lib/fmpp-0.19.14/fmpp.jar
------------------------------------------------------------------------------
svn:mime-type = application/octet-stream
Added: hadoop/hbase/branches/0.20/src/contrib/indexed/lib/fmpp-0.19.14/freemarker.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/contrib/indexed/lib/fmpp-0.19.14/freemarker.jar?rev=896138&view=auto
==============================================================================
Binary file - no diff available.
Propchange: hadoop/hbase/branches/0.20/src/contrib/indexed/lib/fmpp-0.19.14/freemarker.jar
------------------------------------------------------------------------------
svn:mime-type = application/octet-stream
Added: hadoop/hbase/branches/0.20/src/contrib/indexed/lib/fmpp-0.19.14/oro.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/contrib/indexed/lib/fmpp-0.19.14/oro.jar?rev=896138&view=auto
==============================================================================
Binary file - no diff available.
Propchange: hadoop/hbase/branches/0.20/src/contrib/indexed/lib/fmpp-0.19.14/oro.jar
------------------------------------------------------------------------------
svn:mime-type = application/octet-stream
Added: hadoop/hbase/branches/0.20/src/contrib/indexed/lib/fmpp-0.19.14/resolver.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/contrib/indexed/lib/fmpp-0.19.14/resolver.jar?rev=896138&view=auto
==============================================================================
Binary file - no diff available.
Propchange: hadoop/hbase/branches/0.20/src/contrib/indexed/lib/fmpp-0.19.14/resolver.jar
------------------------------------------------------------------------------
svn:mime-type = application/octet-stream
Added: hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/data/types.csv
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/data/types.csv?rev=896138&view=auto
==============================================================================
--- hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/data/types.csv (added)
+++ hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/data/types.csv Tue Jan 5 17:26:49 2010
@@ -0,0 +1,11 @@
+displayName;clazz;primitive;kind
+Byte;Byte;byte;integer
+Char;Character;char;integer
+Short;Short;short;integer
+Integer;Integer;int;integer
+Long;Long;long;integer
+Float;Float;float;floatingPoint
+Double;Double;double;floatingPoint
+ByteArray;byte[];byte[];integerArray
+CharArray;char[];char[];integerArray
+BigDecimal;BigDecimal;BigDecimal;comparable
\ No newline at end of file
Added: hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/ArrayList.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/ArrayList.java?rev=896138&view=auto
==============================================================================
--- hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/ArrayList.java (added)
+++ hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/ArrayList.java Tue Jan 5 17:26:49 2010
@@ -0,0 +1,500 @@
+<@pp.dropOutputFile />
+<#list types as T>
+<@pp.changeOutputFile name=T.displayName+"ArrayList.java" />
+/**
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver.idx.support.arrays;
+
+
+import org.apache.hadoop.hbase.util.Bytes;
+
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+import org.apache.commons.lang.ArrayUtils;
+<#if T.clazz == "BigDecimal">
+import java.math.BigDecimal;
+</#if>
+<#if (T.kind == "integerArray" || T.kind == "comparable")>
+import org.apache.hadoop.hbase.util.ClassSize;
+</#if>
+
+/**
+ * A list designed to be used as the key store for indexed HBase.
+ * <p/>
+ * NOTE: This class is completely unsynchronised.
+ */
+public class ${T.displayName}ArrayList implements List<${T.clazz}> {
+
+<#if T.kind == "integerArray">
+ <#assign arrayPrimitive = T.primitive?substring(0,T.primitive?index_of('['))>
+</#if>
+
+ //DO NOT EDIT THIS FILE, EDIT THE FMPP TEMPLATE INSTEAD.
+ //To generate source execute
+ // **/src/contrib/indexed# ant -f build-fmpp.xml -lib lib/fmpp-0.19.14
+
+ /**
+ * Default initial size of the array backing this list.
+ */
+ private static final int DEFAULT_SIZE = 1;
+
+ /**
+ * The scaling factor we use to resize the backing buffer when the list needs to grow.
+ */
+ private static final float SCALE_FACTOR = 1.5f;
+
+ /**
+ * The array backing this list.
+ */
+ private ${T.primitive}[] values;
+
+ /**
+ * The number of values present in the list.
+ */
+ private int size;
+
+<#if (T.kind == "integerArray")>
+ /**
+ * The accumulated heap size of elements stored in this list.
+ */
+ private long totalElementsHeapSize;
+</#if>
+
+ /**
+ * Constructor that initialises with the default size.
+ */
+ public ${T.displayName}ArrayList() {
+ this(DEFAULT_SIZE);
+ }
+
+ /**
+ * Constructor which initialises with the specified initial capacity.
+ *
+ * @param initialCapacity the initial capacity of the backing array
+ */
+ public ${T.displayName}ArrayList(int initialCapacity) {
+<#if T.kind == "integerArray">
+ values = new ${arrayPrimitive}[initialCapacity][];
+<#else>
+ values = new ${T.primitive}[initialCapacity];
+</#if>
+ }
+
+ /**
+ * Constructor which initialises the content from the supplied array list.
+ *
+ * @param initial the initial contents
+ */
+ public ${T.displayName}ArrayList(${T.displayName}ArrayList initial) {
+ // Initialise the internal storage to the appropriate size
+ this(initial.size);
+
+ // Copy over the references/values
+ System.arraycopy(initial.values, 0, this.values, 0, initial.size);
+ this.size = initial.size;
+ }
+
+ /**
+ * Adds the element to the end of the list.
+ *
+ * @param element the new element
+ */
+ public void add(${T.primitive} element) {
+ ensureCapacity(size + 1);
+ values[size] = element;
+ size++;
+<#if (T.kind == "integerArray")>
+ totalElementsHeapSize += ClassSize.ARRAY +
+ (element != null ? element.length * Bytes.SIZEOF_${arrayPrimitive?upper_case}: 0);
+</#if>
+ }
+
+<#if T.clazz != "byte[]">
+ @Override
+ public void add(byte[] bytes) {
+ add(fromBytes(bytes));
+ }
+</#if>
+
+<#if T.kind == "comparable">
+ @Override
+ public int compare(${T.clazz} needle, int compareToIndex) {
+ ${T.clazz} compareTo = values[compareToIndex];
+ return needle.compareTo(compareTo);
+ }
+<#elseif T.kind == "floatingPoint">
+ @Override
+ public int compare(${T.clazz} needle, int compareToIndex) {
+ ${T.primitive} compareTo = values[compareToIndex];
+ return ${T.clazz}.compare(needle, compareTo);
+ }
+<#elseif T.kind == "integer">
+ @Override
+ public int compare(${T.clazz} needle, int compareToIndex) {
+ ${T.primitive} compareTo = values[compareToIndex];
+ if (needle > compareTo) {
+ return 1;
+ } else if (needle < compareTo) {
+ return -1;
+ } else {
+ return 0;
+ }
+ }
+<#elseif T.kind == "integerArray">
+ @Override
+ public int compare(${T.clazz} needle, int compareToIndex) {
+ ${T.primitive} compareTo = values[compareToIndex];
+ int length = Math.min(needle.length, compareTo.length);
+ for (int i = 0; i < length; i++) {
+ if (needle[i] != compareTo[i]) {
+ if (needle[i] > compareTo[i]) {
+ return 1;
+ } else if (needle[i] < compareTo[i]) {
+ return -1;
+ }
+ }
+ }
+
+ return needle.length - compareTo.length;
+ }
+</#if>
+
+ /**
+ * Grows the backing array to the requested size.
+ *
+ * @param requested the new capacity.
+ */
+ private void ensureCapacity(int requested) {
+ // If we need to resize
+ if (requested > values.length) {
+ // Calculate the new size, growing slowly at the start to avoid overallocation too early.
+ int newSize = Math.max(requested, (int) (values.length * SCALE_FACTOR + 1));
+
+<#if T.kind == "integerArray">
+ ${T.primitive}[] newValues = new ${arrayPrimitive}[newSize][];
+<#else>
+ // Create the new array
+ ${T.primitive}[] newValues = new ${T.primitive}[newSize];
+</#if>
+
+ // Populate the new backing array
+ System.arraycopy(values, 0, newValues, 0, size);
+ values = newValues;
+ }
+ }
+
+ /**
+ * Retrieves the element at the requested index.
+ *
+ * @param index the element index you wish to retrieve
+ * @return the value at that index
+ */
+ public ${T.primitive} get(int index) {
+ if (index >= size) {
+ throw new ArrayIndexOutOfBoundsException("Attempted to access index " + index + " but array is " + size + " elements");
+ }
+
+ return values[index];
+ }
+
+ /**
+ * Searches the list for the nominated value.
+ *
+ * @param searchFor the value you are looking for
+ * @return the first index the value was found at or -1 if not found
+ */
+ public int indexOf(${T.primitive} searchFor) {
+ // Check each of the values. Don't bother with get() since we don't need its protection.
+ for (int i = 0; i < size; i++) {
+<#if T.kind == "integerArray">
+ if (Arrays.equals(values[i], searchFor)) {
+<#elseif T.kind == "comparable">
+ if (values[i].equals(searchFor)) {
+<#else>
+ if (values[i] == searchFor) {
+</#if>
+ return i;
+ }
+ }
+
+ // Didn't find it.
+ return -1;
+ }
+
+ /**
+ * Simple iterator that runs over the values in the list.
+ */
+ private static final class InternalIterator
+ implements Iterator<${T.clazz}> {
+
+ private ${T.primitive}[] values;
+ private int size;
+ private int current = 0;
+
+ private InternalIterator(${T.primitive}[] values, int size) {
+ this.values = values;
+ this.size = size;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public boolean hasNext() {
+ return current < size;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public ${T.clazz} next() {
+ if (!hasNext()) {
+ throw new NoSuchElementException();
+ }
+ return values[current++];
+ }
+
+ /**
+ * Not supported.
+ */
+ @Override
+ public void remove() {
+ throw new UnsupportedOperationException("remove() is not supported");
+ }
+ }
+
+ /**
+ * Returns an iterator over the underlying content. Note that this is completely unsynchronised and the contents can change under you.
+ */
+ @Override
+ public Iterator<${T.clazz}> iterator() {
+ return new InternalIterator(values, size);
+ }
+
+ /**
+ * Checks if the list is empty.
+ *
+ * @return true if the list is empty
+ */
+ @Override
+ public boolean isEmpty() {
+ return size == 0;
+ }
+
+ /**
+ * Sets the specified index to the nominated value.
+ *
+ * @param index the list index
+ * @param newValue the value
+ */
+ public void set(int index, ${T.primitive} newValue) {
+ if (index >= size) {
+ throw new ArrayIndexOutOfBoundsException("Attempted to access index " + index + " but array is " + size + " elements");
+ }
+<#if (T.kind == "integerArray")>
+ totalElementsHeapSize -= ClassSize.ARRAY +
+ (values[index] != null ? values[index].length * Bytes.SIZEOF_${arrayPrimitive?upper_case}: 0);
+</#if>
+
+ values[index] = newValue;
+
+<#if (T.kind == "integerArray")>
+ totalElementsHeapSize += ClassSize.ARRAY +
+ (newValue != null ? newValue.length * Bytes.SIZEOF_${arrayPrimitive?upper_case}: 0);
+</#if>
+ }
+
+<#if T.clazz != "byte[]">
+ @Override
+ public void set(int index, byte[] newValue) {
+ set(index, fromBytes(newValue));
+ }
+</#if>
+
+ /**
+ * Removes the specified index from the list.
+ *
+ * @param index the index to remove
+ * @return the original value
+ */
+ public ${T.primitive} remove(int index) {
+ if (index >= size) {
+ throw new ArrayIndexOutOfBoundsException("Attempted to access index " + index + " but array is " + size + " elements");
+ }
+
+ ${T.primitive} original = values[index];
+ System.arraycopy(values, index + 1, values, index, size - index - 1);
+ size--;
+<#if (T.kind == "integerArray")>
+ totalElementsHeapSize -= ClassSize.ARRAY +
+ (original != null ? original.length * Bytes.SIZEOF_${arrayPrimitive?upper_case}: 0);
+</#if>
+ return original;
+ }
+
+
+ /**
+ * Inserts at the specified index to the list.
+ *
+ * @param index the index to insert
+ * @param newValue the value to insert
+ */
+ public void insert(int index, ${T.primitive} newValue) {
+ if (index > size) {
+ throw new ArrayIndexOutOfBoundsException("Attempted to access index " + index + " but array is " + size + " elements");
+ }
+
+ ensureCapacity(size + 1);
+ if (index != size) {
+ System.arraycopy(values, index, values, index + 1, size - index);
+ }
+ values[index] = newValue;
+ size++;
+<#if (T.kind == "integerArray")>
+ totalElementsHeapSize += ClassSize.ARRAY +
+ (newValue != null ? newValue.length * Bytes.SIZEOF_${arrayPrimitive?upper_case}: 0);
+</#if>
+ }
+
+<#if T.clazz != "byte[]">
+ @Override
+ public void insert(int index, byte[] newValue) {
+ insert(index, fromBytes(newValue));
+ }
+</#if>
+
+ /**
+ * Removes the last item in the list.
+ *
+ * @return the original value
+ */
+ public ${T.primitive} removeLast() {
+ if (size < 1) {
+ throw new ArrayIndexOutOfBoundsException("Attempted to remove last element from array with size 0");
+ }
+
+ ${T.primitive} result = values[size - 1];
+ size--;
+ <#if (T.kind == "integerArray")>
+ values[size] = null;
+ totalElementsHeapSize -= ClassSize.ARRAY +
+ (result != null ? result.length * Bytes.SIZEOF_${arrayPrimitive?upper_case}: 0);
+ </#if>
+
+
+ return result;
+ }
+
+ /**
+ * Returns the current number of elements in this list.
+ *
+ * @return the number of elements.
+ */
+ public int size() {
+ return size;
+ }
+
+<#if T.clazz == "Byte">
+ @Override
+ public ${T.clazz} fromBytes(byte[] bytes) {
+ assert bytes.length == 1;
+ return bytes[0];
+ }
+<#elseif T.clazz == "byte[]">
+ @Override
+ public ${T.clazz} fromBytes(byte[] bytes) {
+ return bytes;
+ }
+<#elseif T.clazz == "char[]">
+ @Override
+ public ${T.clazz} fromBytes(byte[] bytes) {
+ return Bytes.to${arrayPrimitive?cap_first}s(bytes);
+ }
+<#else>
+ @Override
+ public ${T.clazz} fromBytes(byte[] bytes) {
+ return Bytes.to${T.primitive?cap_first}(bytes);
+ }
+</#if>
+
+
+<#if (T.kind == "integerArray")>
+ @Override
+ public long heapSize() {
+ return FIXED_OVERHEAD + Bytes.SIZEOF_LONG +
+ ClassSize.REFERENCE * values.length + totalElementsHeapSize;
+ }
+<#elseif T.clazz == "BigDecimal">
+ @Override
+ public long heapSize() {
+ // take a rough estimate that a big decimal's overhead is 50 bytes.
+ // TODO fix
+ return FIXED_OVERHEAD + Bytes.SIZEOF_LONG +
+ (ClassSize.REFERENCE + 50) * values.length;
+ }
+<#else>
+ @Override
+ public long heapSize() {
+ return FIXED_OVERHEAD + Bytes.SIZEOF_${T.primitive?upper_case} * values.length;
+ }
+</#if>
+
+
+ /**
+ * Return a nice view of the list.
+ * {@inheritDoc}
+ */
+ @Override
+ public String toString() {
+ return Arrays.toString(Arrays.copyOf(values, size));
+ }
+
+ /**
+ * Checks the contents of the collection for equality.
+ * <p/>
+ * {@inheritDoc}
+ */
+ @Override
+ public boolean equals(Object compareTo) {
+ if (this == compareTo) {
+ return true;
+ }
+ if (!(compareTo instanceof ${T.displayName}ArrayList)) {
+ return false;
+ }
+
+ ${T.displayName}ArrayList that = (${T.displayName}ArrayList) compareTo;
+
+ return this.size == that.size &&
+ ArrayUtils.isEquals(this.values, that.values);
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public int hashCode() {
+ return 31 * Arrays.hashCode(values) + size;
+ }
+}
+</#list>
\ No newline at end of file
Added: hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/TestArrayList.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/TestArrayList.java?rev=896138&view=auto
==============================================================================
--- hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/TestArrayList.java (added)
+++ hadoop/hbase/branches/0.20/src/contrib/indexed/src/fmpp/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/TestArrayList.java Tue Jan 5 17:26:49 2010
@@ -0,0 +1,409 @@
+<@pp.dropOutputFile />
+<#list types as T>
+<@pp.changeOutputFile name="Test"+T.displayName+"ArrayList.java" />
+/**
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver.idx.support.arrays;
+
+import junit.framework.Assert;
+import org.apache.hadoop.hbase.HBaseTestCase;
+import org.apache.commons.lang.ArrayUtils;
+
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+<#if (T.kind == "integerArray")>
+import java.util.Arrays;
+</#if>
+<#if T.clazz == "BigDecimal">
+import java.math.BigDecimal;
+</#if>
+
+
+public class Test${T.displayName}ArrayList extends HBaseTestCase {
+
+<#if (T.kind == "integerArray")>
+<#macro assertEquals expr1 expr2>
+ Assert.assertTrue(Arrays.equals(${expr1}, ${expr2}));
+</#macro>
+<#macro assertNotEquals expr1 expr2>
+ Assert.assertFalse(Arrays.equals(${expr1}, ${expr2}));
+</#macro>
+<#else>
+<#macro assertEquals expr1 expr2>
+ Assert.assertEquals(${expr1}, ${expr2});
+</#macro>
+<#macro assertNotEquals expr1 expr2>
+ Assert.assertFalse(${expr1}.equals(${expr2}));
+</#macro>
+</#if>
+
+
+ private static final int[] INVALID_INDEXES = {0, -1, 1};
+
+<#if (T.kind == "integer" || T.kind == "floatingPoint")>
+ <#assign zero = "("+T.primitive+") 0">
+ <#assign one = "("+T.primitive+") 1">
+ <#assign two = "("+T.primitive+") 2">
+ <#assign three = "("+T.primitive+") 3">
+ <#assign four = "("+T.primitive+") 4">
+ <#assign five = "("+T.primitive+") 5">
+ <#assign forty_one = "("+T.primitive+") 41">
+ <#assign forty_two = "("+T.primitive+") 42">
+ <#assign forty_three = "("+T.primitive+") 43">
+<#elseif (T.kind == "integerArray")>
+ <#assign zero="new "+T.primitive+"{0}">
+ <#assign one="new "+T.primitive+"{1}">
+ <#assign two="new "+T.primitive+"{2}">
+ <#assign three="new "+T.primitive+"{3}">
+ <#assign four="new "+T.primitive+"{4}">
+ <#assign five="new "+T.primitive+"{5}">
+ <#assign forty_one="new "+T.primitive+"{4, 1, 5, 6}">
+ <#assign forty_two="new "+T.primitive+"{4, 2, 3}">
+ <#assign forty_three="new "+T.primitive+"{4, 3, 2}">
+<#else>
+ <#assign zero="new "+T.clazz+"(0)">
+ <#assign one="new "+T.clazz+"(1)">
+ <#assign two="new "+T.clazz+"(2)">
+ <#assign three="new "+T.clazz+"(3)">
+ <#assign four="new "+T.clazz+"(4)">
+ <#assign five="new "+T.clazz+"(5)">
+ <#assign forty_one="new "+T.clazz+"(41)">
+ <#assign forty_two="new "+T.clazz+"(42)">
+ <#assign forty_three="new "+T.clazz+"(43)">
+</#if>
+ /**
+ * Verifies that the initial size constructor initialises as expected.
+ */
+ public void testInitialSizeAndEmpty() {
+ ${T.displayName}ArrayList test = new ${T.displayName}ArrayList();
+ checkSizeAndCapacity(test, 0, 1);
+ Assert.assertTrue(test.isEmpty());
+
+ test = new ${T.displayName}ArrayList(1000);
+ checkSizeAndCapacity(test, 0, 1000);
+ Assert.assertTrue(test.isEmpty());
+
+ test.add(${five});
+ Assert.assertFalse(test.isEmpty());
+ }
+
+ /**
+ * Verifies copy constructor.
+ */
+ public void testCopyConstructor() {
+ // Create an original with a capacity of 2, but only one entry
+ ${T.displayName}ArrayList original = new ${T.displayName}ArrayList(2);
+ original.add(${one});
+ ${T.primitive}[] values = (${T.primitive}[]) getField(original, "values");
+ Assert.assertEquals(values.length, 2);
+ Assert.assertEquals(original.size(), 1);
+
+ // Create a copy of the original and check that its size + capacity are the minimum
+ ${T.displayName}ArrayList copy = new ${T.displayName}ArrayList(original);
+ Assert.assertEquals(copy.size(), 1);
+ <@assertEquals expr1="copy.get(0)" expr2=one />
+ values = (${T.primitive}[]) getField(copy, "values");
+ Assert.assertEquals(values.length, 1);
+ }
+
+ /**
+ * Ensures the equals() method behaves as expected.
+ */
+ public void testEquals() {
+ ${T.displayName}ArrayList test1a = new ${T.displayName}ArrayList();
+ test1a.add(${one});
+ ${T.displayName}ArrayList test1b = new ${T.displayName}ArrayList();
+ test1b.add(${one});
+ ${T.displayName}ArrayList test2 = new ${T.displayName}ArrayList();
+ test2.add(${two});
+
+ Assert.assertTrue(test1a.equals(test1b));
+ Assert.assertFalse(test1a.equals(test2));
+ }
+
+
+ /**
+ * Ensures the number of elements in the list and its backing capacity are what we expect.
+ *
+ * @param test the list to test
+ * @param size the expected number of elements in the list
+ * @param capacity the expected capacity
+ */
+ private void checkSizeAndCapacity(${T.displayName}ArrayList test, int size, int capacity) {
+ Assert.assertEquals(test.size(), size);
+
+ ${T.primitive}[] values = (${T.primitive}[]) getField(test, "values");
+
+ Assert.assertEquals(values.length, capacity);
+ }
+
+ /**
+ * Tests that adding elements grows the array size and capacity as expected.
+ */
+ public void testAddGetAndGrow() {
+ // Initialise
+ ${T.displayName}ArrayList test = new ${T.displayName}ArrayList();
+ checkSizeAndCapacity(test, 0, 1);
+
+ // Add the first element and we expect the capacity to be unchanged since we don't have any spots consumed.
+ test.add(${one});
+ <@assertEquals expr1="test.get(0)" expr2=one />
+ checkSizeAndCapacity(test, 1, 1);
+
+ // Add the next element and we expect the capacity to grow by one only
+ test.add(${two});
+ <@assertEquals expr1="test.get(1)" expr2=two />
+ checkSizeAndCapacity(test, 2, 2);
+
+ // Add the next element and we expect the capacity to grow by two
+ test.add(${three});
+ <@assertEquals expr1="test.get(2)" expr2=three />
+ checkSizeAndCapacity(test, 3, 4);
+
+ // Add the next element and we expect the capacity to be unchanged
+ test.add(${four});
+ <@assertEquals expr1="test.get(3)" expr2=four />
+ checkSizeAndCapacity(test, 4, 4);
+
+ // Add the next element and we expect the capacity to be 1.5+1 times larger
+ test.add(${five});
+ <@assertEquals expr1="test.get(4)" expr2=five />
+ checkSizeAndCapacity(test, 5, 7);
+ }
+
+ /**
+ * Tests get() with various invalid ranges.
+ */
+ public void testInvalidGet() {
+ for (int index : INVALID_INDEXES) {
+ try {
+ ${T.displayName}ArrayList test = new ${T.displayName}ArrayList();
+ test.get(index);
+ } catch (ArrayIndexOutOfBoundsException ignored) {
+ continue;
+ }
+ Assert.fail("Expected an array index out of bounds exception");
+ }
+ }
+
+
+ /**
+ * Tests the indexOf() and set() methods.
+ */
+ public void testIndexOfAndSet() {
+ ${T.displayName}ArrayList test = new ${T.displayName}ArrayList();
+
+ // Test with first value added to list
+ ${T.primitive} testValue = ${forty_two};
+ Assert.assertEquals(test.indexOf(testValue), -1);
+ test.add(testValue);
+ Assert.assertEquals(test.indexOf(testValue), 0);
+
+ // Add a second one
+ testValue = ${forty_three};
+ Assert.assertEquals(test.indexOf(testValue), -1);
+ test.add(testValue);
+ Assert.assertEquals(test.indexOf(testValue), 1);
+
+ // Change the first to a new value
+ testValue = ${forty_one};
+ Assert.assertEquals(test.indexOf(testValue), -1);
+ test.set(0, testValue);
+ Assert.assertEquals(test.indexOf(testValue), 0);
+ }
+
+ /**
+ * Tests the Searchable implementation.
+ */
+ public void testSearchable() {
+<#if T.clazz != "byte[]">
+ ${T.displayName}ArrayList test = new ${T.displayName}ArrayList();
+<#else>
+ List<?> test = new ${T.displayName}ArrayList();
+</#if>
+
+ // Test with first value added to list
+ ${T.primitive} testValue = ${forty_two};
+ Assert.assertEquals(BinarySearch.search(test, test.size(), testValue), -1);
+ test.add(testValue);
+ Assert.assertEquals(BinarySearch.search(test, test.size(), testValue), 0);
+
+ // Add a second one
+ testValue = ${forty_three};
+ Assert.assertEquals(BinarySearch.search(test, test.size(), testValue), -2);
+ test.add(testValue);
+ Assert.assertEquals(BinarySearch.search(test, test.size(), testValue), 1);
+
+ // Search for something off the start
+ testValue = ${forty_one};
+ Assert.assertEquals(BinarySearch.search(test, test.size(), testValue), -1);
+ }
+
+ /**
+ * Tests set() with various invalid ranges.
+ */
+ public void testInvalidSet() {
+ for (int index : INVALID_INDEXES) {
+ try {
+ ${T.displayName}ArrayList test = new ${T.displayName}ArrayList();
+ test.set(index, ${zero});
+ } catch (ArrayIndexOutOfBoundsException ignored) {
+ continue;
+ }
+ Assert.fail("Expected an array index out of bounds exception");
+ }
+ }
+
+
+ /**
+ * Tests iteration via the Iterable interface.
+ */
+ public void testIterable() {
+ final java.util.List<${T.clazz}> testData = new ArrayList<${T.clazz}>();
+
+ // Test with no content first
+ ${T.displayName}ArrayList test = new ${T.displayName}ArrayList();
+ testData.clear();
+ for (${T.primitive} item : test) {
+ testData.add(item);
+ }
+ Assert.assertEquals(testData.size(), 0);
+
+ // Add a value and ensure it is returned
+ test.add(${one});
+ testData.clear();
+ for (${T.primitive} item : test) {
+ testData.add(item);
+ }
+ Assert.assertTrue(ArrayUtils.isEquals(testData.toArray(),
+ new Object[]{${one}}));
+
+ // Add another value and ensure it is returned
+ test.add(${one});
+ testData.clear();
+ for (${T.primitive} item : test) {
+ testData.add(item);
+ }
+ Assert.assertTrue(ArrayUtils.isEquals(testData.toArray(),
+ new Object[]{${one}, ${one}}));
+ }
+
+ /**
+ * Tests the remove() method.
+ */
+ public void testRemove() {
+ ${T.displayName}ArrayList test = new ${T.displayName}ArrayList();
+ test.add(${one});
+ <@assertEquals expr1="test.get(0)" expr2=one />
+ //Assert.assertEquals(test.get(0), ${one});
+ test.remove(0);
+ Assert.assertTrue(test.isEmpty());
+
+ // Add some
+ test.add(${zero});
+ test.add(${one});
+ test.add(${two});
+
+ // Remove a value from the middle and ensure correct operation
+ <@assertEquals expr1="test.remove(1)" expr2=one />
+ <@assertEquals expr1="test.get(0)" expr2=zero />
+ <@assertEquals expr1="test.get(1)" expr2=two />
+ }
+
+ /**
+ * Tests the remove() method.
+ */
+ public void testInsert() {
+ ${T.displayName}ArrayList test = new ${T.displayName}ArrayList();
+ test.insert(0, ${one});
+ <@assertEquals expr1="test.get(0)" expr2=one />
+ Assert.assertEquals(test.size(), 1);
+
+ test.insert(0, ${zero});
+ <@assertEquals expr1="test.get(0)" expr2=zero />
+ <@assertEquals expr1="test.get(1)" expr2=one/>
+ Assert.assertEquals(test.size(), 2);
+
+ test.insert(1, ${two});
+ <@assertEquals expr1="test.get(0)" expr2=zero />
+ <@assertEquals expr1="test.get(1)" expr2=two />
+ <@assertEquals expr1="test.get(2)" expr2=one />
+ Assert.assertEquals(test.size(), 3);
+
+ test.insert(3, ${three});
+ <@assertEquals expr1="test.get(0)" expr2=zero />
+ <@assertEquals expr1="test.get(1)" expr2=two />
+ <@assertEquals expr1="test.get(2)" expr2=one />
+ <@assertEquals expr1="test.get(3)" expr2=three />
+ Assert.assertEquals(test.size(), 4);
+ }
+
+ /**
+ * Verifies the removeLast() method works as expected.
+ */
+ public void testRemoveLast() {
+ ${T.displayName}ArrayList test = new ${T.displayName}ArrayList();
+ test.add(${one});
+ test.add(${two});
+
+ <@assertEquals expr1="test.removeLast()" expr2=two />
+ <@assertEquals expr1="test.get(0)" expr2=one />
+
+ <@assertEquals expr1="test.removeLast()" expr2=one />
+ Assert.assertTrue(test.isEmpty());
+ }
+
+ /**
+ * Tests remove() with various invalid ranges.
+ */
+ public void testInvalidRemove() {
+ for (int index : INVALID_INDEXES) {
+ try {
+ ${T.displayName}ArrayList test = new ${T.displayName}ArrayList();
+ test.remove(index);
+ } catch (ArrayIndexOutOfBoundsException ignored) {
+ continue;
+ }
+ Assert.fail("Expected an array index out of bounds exception");
+ }
+ }
+
+ /**
+ * Extracts a declared field from a given object.
+ *
+ * @param target the object from which to extract the field
+ * @param name the name of the field
+ * @return the declared field
+ */
+ public static Object getField(Object target, String name) {
+ try {
+ Field field = target.getClass().getDeclaredField(name);
+ field.setAccessible(true);
+ return field.get(target);
+ } catch (IllegalAccessException e) {
+ Assert.fail("Exception " + e);
+ } catch (NoSuchFieldException e) {
+ Assert.fail("Exception " + e);
+ }
+ return null;
+ }
+
+}
+</#list>
\ No newline at end of file
Added: hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/JmxHelper.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/JmxHelper.java?rev=896138&view=auto
==============================================================================
--- hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/JmxHelper.java (added)
+++ hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/JmxHelper.java Tue Jan 5 17:26:49 2010
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import javax.management.InstanceAlreadyExistsException;
+import javax.management.InstanceNotFoundException;
+import javax.management.MBeanRegistrationException;
+import javax.management.MBeanServer;
+import javax.management.NotCompliantMBeanException;
+import javax.management.ObjectName;
+import java.lang.management.ManagementFactory;
+
+/**
+ * Utilities for JMX.
+ */
+public final class JmxHelper {
+ static final Log LOG = LogFactory.getLog(JmxHelper.class);
+
+
+ private JmxHelper() {
+ // private constuctor for utility classes.
+ }
+
+ /**
+ * Registers an MBean with the platform MBean server. if an MBean with the
+ * same name exists it will be unregistered and the provided MBean would
+ * replace it
+ *
+ * @param objectName the object name
+ * @param mbean the mbean class
+ */
+ public static void registerMBean(ObjectName objectName, Object mbean) {
+ final MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
+ if (mbs.isRegistered(objectName)) {
+ try {
+ LOG.info("unregister: "+objectName);
+ mbs.unregisterMBean(objectName);
+ } catch (InstanceNotFoundException e) {
+ throw new IllegalStateException("mbean " + objectName +
+ " failed unregistration", e);
+ } catch (MBeanRegistrationException e) {
+ throw new IllegalStateException("mbean " + objectName +
+ " failed unregistration", e);
+ }
+ }
+ try {
+ LOG.info("register: " + objectName);
+ mbs.registerMBean(mbean, objectName);
+ } catch (InstanceAlreadyExistsException e) {
+ throw new IllegalStateException("mbean " + objectName +
+ " failed registration", e);
+ } catch (MBeanRegistrationException e) {
+ throw new IllegalStateException("mbean " + objectName +
+ " failed registration", e);
+ } catch (NotCompliantMBeanException e) {
+ throw new IllegalStateException("mbean " + objectName +
+ " failed registration", e);
+ }
+ }
+}
Added: hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/WritableHelper.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/WritableHelper.java?rev=896138&view=auto
==============================================================================
--- hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/WritableHelper.java (added)
+++ hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/WritableHelper.java Tue Jan 5 17:26:49 2010
@@ -0,0 +1,118 @@
+/*
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.io.Writable;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+/**
+ * A collection of writable utils.
+ */
+public class WritableHelper {
+ private WritableHelper() {
+ }
+
+ /**
+ * Helper method to instantiate an expression instance using the provided
+ * className.
+ * @param className the class name
+ * @param baseClass the base class type (the class must be or inherit from
+ * this type)
+ * @return the instance
+ */
+ @SuppressWarnings("unchecked")
+ public static <T extends Writable> T instanceForName(String className, Class<T> baseClass) {
+ try {
+ Class<T> clazz = (Class<T>) Class.forName(className);
+ return clazz.newInstance();
+ } catch (Exception e) {
+ throw new IllegalArgumentException("Can't find or instantiate class " + className, e);
+ }
+ }
+
+ /**
+ * Reads an instance of provided clazz (or one of it's subclasses) from the
+ * provided data input.
+ * @param in the data into
+ * @param clazz the class that the instance will be or extend from
+ * @param <T> the type
+ * @return the instance
+ * @throws IOException if an io error occurs
+ */
+ public static <T extends Writable> T readInstance(DataInput in, Class<T> clazz) throws IOException {
+ String className = Bytes.toString(Bytes.readByteArray(in));
+ T instance = instanceForName(className, clazz);
+ instance.readFields(in);
+ return instance;
+ }
+
+ /**
+ * Reads an instance of provided clazz (or one of it's subclasses) from the
+ * provided data input.
+ * <p/>
+ * <p>Note: It's assumed that the {@link #writeInstanceNullable(java.io.DataOutput,
+ * org.apache.hadoop.io.Writable)} method was used to write out the instance.
+ * @param in the data into
+ * @param clazz the class that the instance will be or extend from
+ * @param <T> the type
+ * @return the instance (or null)
+ * @throws IOException if an io error occurs
+ */
+ public static <T extends Writable> T readInstanceNullable(DataInput in, Class<T> clazz) throws IOException {
+ if (in.readBoolean()) {
+ return readInstance(in, clazz);
+ } else {
+ return null;
+ }
+ }
+
+ /**
+ * Writes out the provided writable instance to the data outout.
+ * @param out the data output
+ * @param writable the writable isntance (must not be null)
+ * @throws IOException if an io error occurs
+ */
+ public static void writeInstance(DataOutput out, Writable writable) throws IOException {
+ if (writable == null) {
+ throw new IllegalArgumentException("The writable instance must not be null");
+ }
+ Bytes.writeByteArray(out, Bytes.toBytes(writable.getClass().getName()));
+ writable.write(out);
+ }
+
+ /**
+ * Writes out the provided writable instance to the data outout.
+ * @param out the data output
+ * @param writable the writable isntance (can be null)
+ * @throws IOException if an io error occurs
+ */
+ public static void writeInstanceNullable(DataOutput out, Writable writable) throws IOException {
+ if (writable == null) {
+ out.writeBoolean(false);
+ } else {
+ out.writeBoolean(true);
+ writeInstance(out, writable);
+ }
+ }
+}
Added: hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/IdxColumnDescriptor.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/IdxColumnDescriptor.java?rev=896138&view=auto
==============================================================================
--- hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/IdxColumnDescriptor.java (added)
+++ hadoop/hbase/branches/0.20/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/IdxColumnDescriptor.java Tue Jan 5 17:26:49 2010
@@ -0,0 +1,280 @@
+/**
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client.idx;
+
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.WritableHelper;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.io.RowResult;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.io.DataOutputBuffer;
+import org.apache.hadoop.io.DataInputBuffer;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Collection;
+
+/**
+ * An extension of the {@link org.apache.hadoop.hbase.HColumnDescriptor} that
+ * adds the ability to define indexes on column family qualifiers.
+ * <p/>
+ */
+public class IdxColumnDescriptor extends HColumnDescriptor {
+ /**
+ * The key used to store and retrieve index descriptors.
+ */
+ public static final ImmutableBytesWritable INDEX_DESCRIPTORS =
+ new ImmutableBytesWritable(Bytes.toBytes("INDEX_DESC"));
+
+ /**
+ * Constructor.
+ * @see org.apache.hadoop.hbase.HColumnDescriptor#HColumnDescriptor()
+ */
+ public IdxColumnDescriptor() {
+ super();
+ }
+
+ /**
+ * Constructor.
+ * @see org.apache.hadoop.hbase.HColumnDescriptor#HColumnDescriptor(String)
+ */
+ public IdxColumnDescriptor(String familyName) {
+ super(familyName);
+ }
+
+ /**
+ * Constructor.
+ * @see org.apache.hadoop.hbase.HColumnDescriptor#HColumnDescriptor(byte[])
+ */
+ public IdxColumnDescriptor(byte[] familyName) {
+ super(familyName);
+ }
+
+ /**
+ * Constructor.
+ * @see org.apache.hadoop.hbase.HColumnDescriptor#HColumnDescriptor(HColumnDescriptor)
+ */
+ public IdxColumnDescriptor(HColumnDescriptor desc) {
+ super(desc);
+ }
+
+ /**
+ * Constructor.
+ * @see org.apache.hadoop.hbase.HColumnDescriptor#HColumnDescriptor(byte[],
+ * int, String, boolean, boolean, int, boolean)
+ */
+ public IdxColumnDescriptor(byte[] familyName, int maxVersions,
+ String compression, boolean inMemory,
+ boolean blockCacheEnabled, int timeToLive,
+ boolean bloomFilter) {
+ super(familyName, maxVersions, compression, inMemory, blockCacheEnabled,
+ timeToLive, bloomFilter);
+ }
+
+ /**
+ * Constructor.
+ * @see org.apache.hadoop.hbase.HColumnDescriptor#HColumnDescriptor(byte[],
+ * int, String, boolean, boolean, int, int, boolean)
+ */
+ public IdxColumnDescriptor(byte[] familyName, int maxVersions,
+ String compression, boolean inMemory,
+ boolean blockCacheEnabled, int blocksize,
+ int timeToLive, boolean bloomFilter) {
+ super(familyName, maxVersions, compression, inMemory, blockCacheEnabled,
+ blocksize, timeToLive, bloomFilter);
+ }
+
+ /**
+ * Adds the index descriptor to the column family, replacing the existing
+ * index descriptor for the {@link org.apache.hadoop.hbase.client.idx.IdxIndexDescriptor#getQualifierName()
+ * qualifier name} if one exists.
+ * @param descriptor the descriptor
+ * @throws IllegalArgumentException if an index descriptor already exists for
+ * the qualifier
+ * @throws NullPointerException if the provided descriptor has a null
+ * {@link IdxIndexDescriptor#getQualifierName()}
+ * @throws java.io.IOException if an error occurrs while attempting to
+ * write the index descriptors to the values
+ */
+ public void addIndexDescriptor(IdxIndexDescriptor descriptor)
+ throws NullPointerException, IllegalArgumentException, IOException {
+ if (descriptor.getQualifierName() == null
+ || descriptor.getQualifierName().length <= 0) {
+ throw new NullPointerException("Qualifier name cannot be null or empty");
+ }
+ ImmutableBytesWritable qualifierName
+ = new ImmutableBytesWritable(descriptor.getQualifierName());
+ Map<ImmutableBytesWritable, IdxIndexDescriptor> indexDescriptorMap
+ = getIndexDescriptors(this);
+ if (indexDescriptorMap.containsKey(qualifierName)) {
+ throw new IllegalArgumentException("An index already exists on qualifier '"
+ + Bytes.toString(descriptor.getQualifierName()) + "'");
+ }
+ indexDescriptorMap.put(qualifierName, descriptor);
+ setIndexDescriptors(this, indexDescriptorMap);
+ }
+
+ /**
+ * Removes an index descriptor if one exists for the qualifier name.
+ * @param qualifierName the qualifier name
+ * @return true if the index descriptor existed and was removed, otherwise
+ * false
+ * @throws java.io.IOException if an error occurrs while attempting to
+ * write the index descriptors to the values
+ */
+ public boolean removeIndexDescriptor(final byte[] qualifierName) throws IOException {
+ return removeIndexDescriptor(new ImmutableBytesWritable(qualifierName));
+ }
+
+ /**
+ * Removes an index descriptor if one exists for the qualifier name.
+ * @param qualifierName the qualifier name
+ * @return true if the index descriptor existed and was removed, otherwise
+ * false
+ * @throws java.io.IOException if an error occurrs while attempting to
+ * write the index descriptors to the values
+ */
+ public boolean removeIndexDescriptor(final ImmutableBytesWritable qualifierName) throws IOException {
+ Map<ImmutableBytesWritable, IdxIndexDescriptor> indexDescriptorMap
+ = getIndexDescriptors(this);
+ if (indexDescriptorMap.containsKey(qualifierName)) {
+ indexDescriptorMap.remove(qualifierName);
+ setIndexDescriptors(this, indexDescriptorMap);
+ return true;
+ } else {
+ return false;
+ }
+ }
+
+ /**
+ * Returns the index descriptor matching the provided qualifier name.
+ * @param qualifierName the qualifier name
+ * @return the index descriptor or null
+ * @throws java.io.IOException if an error occurrs while reading the index descriptor
+ */
+ public IdxIndexDescriptor getIndexDescriptor(final byte[] qualifierName) throws IOException {
+ return getIndexDescriptor(new ImmutableBytesWritable(qualifierName));
+ }
+
+ /**
+ * Returns the index descriptor matching the provided qualifier name.
+ * @param qualifierName the qualifier name
+ * @return the index descriptor or null
+ * @throws java.io.IOException if an error occurrs while reading the index descriptor
+ */
+ public IdxIndexDescriptor getIndexDescriptor(final ImmutableBytesWritable qualifierName) throws IOException {
+ return hasIndexDescriptors(this) ? getIndexDescriptors(this).get(qualifierName) : null;
+ }
+
+ /**
+ * Returns an unmodifiable set of index descriptions associated with this
+ * column family.
+ * @return the set of index descriptios (never null)
+ * @throws java.io.IOException if an error occurrs while reading the index
+ * descriptors
+ */
+ public Set<IdxIndexDescriptor> getIndexDescriptors() throws IOException {
+ Set<IdxIndexDescriptor> set = new HashSet<IdxIndexDescriptor>();
+ if (hasIndexDescriptors(this)) {
+ set.addAll(getIndexDescriptors(this).values());
+ }
+ return Collections.unmodifiableSet(set);
+ }
+
+ /**
+ * Returns a set for the qualifiers that currently have an index.
+ * @return the set of indexed qualifiers
+ * @throws java.io.IOException if an error occurrs while reading the index
+ * descriptors
+ */
+ public Set<ImmutableBytesWritable> getIndexedQualifiers() throws IOException {
+ Set<ImmutableBytesWritable> set = new HashSet<ImmutableBytesWritable>();
+ if (hasIndexDescriptors(this)) {
+ set.addAll(getIndexDescriptors(this).keySet());
+ }
+ return Collections.unmodifiableSet(set);
+ }
+
+ /**
+ * Helper method to check if a column descriptor contains index descriptors.
+ * @param columnDescriptor the column descriptor
+ * @return true if there are index descriptors, otherwise false
+ */
+ public static boolean hasIndexDescriptors(HColumnDescriptor columnDescriptor) {
+ return columnDescriptor.getValues().containsKey(INDEX_DESCRIPTORS);
+ }
+
+ /**
+ * Helper method to get a map of index descriptors from the
+ * {@link org.apache.hadoop.hbase.HColumnDescriptor#getValues() values} meta-
+ * data available on a column descriptpor.
+ * @param columnDescriptor the column descriptor
+ * @return the map of index descriptors (never null)
+ * @throws IOException if an error occurrs while reading the index descriptors
+ */
+ public static Map<ImmutableBytesWritable, IdxIndexDescriptor> getIndexDescriptors(HColumnDescriptor columnDescriptor) throws IOException {
+ Map<ImmutableBytesWritable, ImmutableBytesWritable> values = columnDescriptor.getValues();
+ if (hasIndexDescriptors(columnDescriptor)) {
+ DataInputBuffer in = new DataInputBuffer();
+ byte[] bytes = values.get(INDEX_DESCRIPTORS).get();
+ in.reset(bytes, bytes.length);
+
+ int size = in.readInt();
+ Map<ImmutableBytesWritable, IdxIndexDescriptor> indexDescriptors
+ = new HashMap<ImmutableBytesWritable, IdxIndexDescriptor>(size);
+
+ for (int i = 0; i < size; i++) {
+ IdxIndexDescriptor indexDescriptor
+ = WritableHelper.readInstance(in, IdxIndexDescriptor.class);
+ indexDescriptors.put(new ImmutableBytesWritable(indexDescriptor.getQualifierName()), indexDescriptor);
+ }
+
+ return indexDescriptors;
+ } else {
+ return new HashMap<ImmutableBytesWritable, IdxIndexDescriptor>();
+ }
+ }
+
+ /**
+ * Helper method to set a map of index descriptors on the
+ * {@link org.apache.hadoop.hbase.HColumnDescriptor#getValues() values} meta-
+ * data available on a column descriptor.
+ * @param columnDescriptor the column descriptor
+ * @param indexDescriptorMap the map of index descriptors
+ * @throws IOException if an error occurrs while writing the index descriptors
+ */
+ public static void setIndexDescriptors(HColumnDescriptor columnDescriptor, Map<ImmutableBytesWritable, IdxIndexDescriptor> indexDescriptorMap) throws IOException {
+ DataOutputBuffer out = new DataOutputBuffer();
+ out.writeInt(indexDescriptorMap.size());
+ for (IdxIndexDescriptor indexDescriptor : indexDescriptorMap.values()) {
+ WritableHelper.writeInstance(out, indexDescriptor);
+ }
+
+ columnDescriptor.setValue(INDEX_DESCRIPTORS.get(), out.getData());
+ }
+}