Posted to commits@hbase.apache.org by ap...@apache.org on 2010/01/09 22:10:05 UTC

svn commit: r897547 [1/10] - in /hadoop/hbase/branches/0.20_on_hadoop-0.18.3: ./ bin/ conf/ lib/ src/contrib/ src/contrib/ec2/ src/contrib/ec2/bin/ src/contrib/ec2/bin/image/ src/contrib/indexed/ src/contrib/indexed/lib/ src/contrib/indexed/lib/fmpp-0....

Author: apurtell
Date: Sat Jan  9 21:09:59 2010
New Revision: 897547

URL: http://svn.apache.org/viewvc?rev=897547&view=rev
Log:
pull up to latest (0.20.3RC1)

Added:
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/lib/zookeeper-3.2.2.jar   (with props)
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/build.xml
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/README.txt
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/build-fmpp.xml
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/build.xml
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/commons-lang-2.4.jar   (with props)
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/easymock-2.5.2.LICENSE.txt
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/easymock-2.5.2.jar   (with props)
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/fmpp-0.19.14/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/fmpp-0.19.14/bsh.jar   (with props)
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/fmpp-0.19.14/fmpp.jar   (with props)
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/fmpp-0.19.14/freemarker.jar   (with props)
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/fmpp-0.19.14/oro.jar   (with props)
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/fmpp-0.19.14/resolver.jar   (with props)
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/src/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/src/data/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/src/data/types.csv
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/src/java/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/src/java/org/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/src/java/org/apache/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/src/java/org/apache/hadoop/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/src/java/org/apache/hadoop/hbase/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/src/java/org/apache/hadoop/hbase/regionserver/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/src/java/org/apache/hadoop/hbase/regionserver/idx/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/src/java/org/apache/hadoop/hbase/regionserver/idx/support/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/ArrayList.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/src/test/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/src/test/org/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/src/test/org/apache/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/src/test/org/apache/hadoop/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/src/test/org/apache/hadoop/hbase/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/src/test/org/apache/hadoop/hbase/regionserver/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/src/test/org/apache/hadoop/hbase/regionserver/idx/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/src/test/org/apache/hadoop/hbase/regionserver/idx/support/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/TestArrayList.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/JmxHelper.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/WritableHelper.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/IdxColumnDescriptor.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/IdxIndexDescriptor.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/IdxQualifierType.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/IdxScan.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/exp/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/exp/And.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/exp/Comparison.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/exp/Compound.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/exp/Expression.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/exp/Or.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/client/idx/package.html
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/CompleteIndex.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/CompleteIndexBuilder.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/EmptyIndex.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/IdxExpressionEvaluator.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/IdxIndex.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/IdxRegion.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/IdxRegionIndexManager.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/IdxRegionMBean.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/IdxRegionMBeanImpl.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/IdxSearchContext.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/Bits.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/Callback.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/IdxClassSize.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/BigDecimalArrayList.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/BinarySearch.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/ByteArrayArrayList.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/ByteArrayList.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/CharArrayArrayList.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/CharArrayList.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/DoubleArrayList.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/FloatArrayList.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/IntegerArrayList.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/List.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/LongArrayList.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/ObjectArrayList.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/arrays/ShortArrayList.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/sets/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/sets/BitSet.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/sets/IntSet.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/sets/IntSetBase.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/sets/IntSetBuilder.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/java/org/apache/hadoop/hbase/regionserver/idx/support/sets/SparseBitSet.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/TestIdxHBaseCluster.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/TestIdxMasterAdmin.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/TestWritableHelper.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/client/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/client/idx/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/client/idx/TestIdxColumnDescriptor.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/client/idx/TestIdxIndexDescriptor.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/client/idx/TestIdxScan.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/client/idx/exp/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/client/idx/exp/TestComparison.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/client/idx/exp/TestExpression.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/TestCompleteIndex.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/TestHRegionWithIdxRegion.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/TestHRegionWithIdxRegionNoIndexes.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/TestIdxExpressionEvaluator.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/TestIdxRegion.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/TestIdxRegionIndexManager.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/TestIdxRegionPerformance.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/TestBits.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/TestIdxClassSize.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/TestBigDecimalArrayList.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/TestByteArrayArrayList.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/TestByteArrayList.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/TestCharArrayArrayList.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/TestCharArrayList.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/TestDoubleArrayList.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/TestFloatArrayList.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/TestIntegerArrayList.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/TestLongArrayList.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/arrays/TestShortArrayList.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/sets/
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/sets/IntSetBaseTestCase.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/sets/TestBitSet.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/test/org/apache/hadoop/hbase/regionserver/idx/support/sets/TestSparseBitSet.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/MemStoreScanner.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/StoreFlusher.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/util/TestPair.java
Removed:
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/lib/zookeeper-3.2.1.jar
Modified:
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/CHANGES.txt
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/Formatter.rb
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/HBase.rb
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/hbase
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/hbase-config.sh
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/hbase-daemon.sh
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/hirb.rb
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/regionservers.sh
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/zookeepers.sh
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/build.xml
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/conf/hbase-env.sh
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/build-contrib.xml
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/hbase-ec2-env.sh
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/hbase-ec2-init-remote.sh
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/image/create-hbase-image-remote
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/launch-hbase-zookeeper
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/docs/src/documentation/content/xdocs/metrics.xml
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/ClusterStatus.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/HConstants.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/HMerge.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/Leases.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/HTable.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/Scan.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/FilterList.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/io/hfile/HFile.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/master/ChangeTableState.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/master/HMaster.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/master/ProcessRegionClose.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/CompactSplitThread.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HLog.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/KeyValueSkipListSet.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/MemStore.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/ScanDeleteTracker.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/Store.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/util/Bytes.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/util/FSUtils.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/util/Pair.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/util/Writables.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWrapper.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestHLog.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestStore.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/util/TestBytes.java
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/webapps/master/master.jsp
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/webapps/master/table.jsp

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/CHANGES.txt?rev=897547&r1=897546&r2=897547&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/CHANGES.txt (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/CHANGES.txt Sat Jan  9 21:09:59 2010
@@ -28,9 +28,23 @@
                its regions around
    HBASE-2065  Cannot disable a table if any of its region is opening 
                at the same time
+   HBASE-2026  NPE in StoreScanner on compaction
+   HBASE-2075  Master requires HDFS superuser privileges due to waitOnSafeMode
+   HBASE-2077  NullPointerException with an open scanner that expired causing 
+               an immediate region server shutdown (Sam Pullara via JD)
+   HBASE-2078  Add JMX settings as commented out lines to hbase-env.sh
+               (Lars George via JD)
+   HBASE-2082  TableInputFormat is ignoring input scan's stop row setting
+               (Scott Wang via Andrew Purtell)
+   HBASE-2068  MetricsRate is missing "registry" parameter
+               (Lars George and Gary Helmling via Stack)
+   HBASE-2035  Binary values are formatted wrong in shell
+   HBASE-2094  hbase-2037 breaks mapreduce jobs going from 0.20.2 to 0.20.3
+   HBASE-2093  [stargate] RowSpec parse bug (Andrew Purtell via JD)
+   HBASE-2097  Deadlock between HRegion.put and HRegion.close (Stack via JD)
+   HBASE-2100  [EC2] Adjust fs.file-max
 
   IMPROVEMENTS
-   HBASE-1961  HBase EC2 scripts
    HBASE-1970  Export does one version only; make it configurable how many
                it does
    HBASE-1975  SingleColumnValueFilter: Add ability to match the value of
@@ -58,6 +72,31 @@
                via Andrew Purtell)
    HBASE-1982  [EC2] Handle potentially large and uneven instance startup
                times
+   HBASE-2062  Metrics documentation outdated (Lars George via JD)
+   HBASE-2045  Update trunk and branch zk to just-released 3.2.2.
+   HBASE-2074  Improvements to the hadoop-config script (Bassam Tabbara via
+               Stack)
+   HBASE-2076  Many javadoc warnings
+   HBASE-2068  MetricsRate is missing "registry" parameter (Lars George via JD)
+   HBASE-2025  0.20.2 accessed from older client throws
+               UndeclaredThrowableException; frustrates rolling upgrade
+   HBASE-2081  Set the retries higher in shell since client pause is lower
+   HBASE-1956  Export HDFS read and write latency as a metric
+   HBASE-2080  [EC2] Support multivolume local instance storage
+   HBASE-2083  [EC2] HDFS DataNode no longer required on master
+   HBASE-2084  [EC2] JAVA_HOME handling broken
+   HBASE-2053  Upper bound of outstanding WALs can be overrun
+   HBASE-1996  Configure scanner buffer in bytes instead of number of rows
+               (Erik Rozendaal and Dave Latham via Stack)
+   HBASE-2021  Add compaction details to master UI
+               (Lars George via Stack)
+   HBASE-2095  TIF should support more confs for the scanner (Bassam Tabbara
+               via Andrew Purtell)
+
+  NEW FEATURES
+   HBASE-1961  HBase EC2 scripts
+   HBASE-2037  Alternate indexed hbase implementation; speeds scans by adding
+               indexes to regions rather than secondary tables
 
 Release 0.20.2 - November 18th, 2009
   INCOMPATIBLE CHANGES
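
The bulk of the Added list above belongs to HBASE-2037, the indexed contrib
(IHBase) that builds indexes inside each region rather than in secondary
tables. A minimal JRuby sketch of client usage, inferred from the class names
in the listing; the constructor and method signatures here are assumptions,
not spelled out in this commit:

    import org.apache.hadoop.hbase.client.idx.IdxScan
    import org.apache.hadoop.hbase.client.idx.exp.Expression
    import org.apache.hadoop.hbase.client.idx.exp.Comparison

    # Assumes 'table' is an open HTable whose 'family' column was created
    # with an IdxColumnDescriptor carrying an IdxIndexDescriptor for 'qual'.
    scan = IdxScan.new
    scan.setExpression(Expression.comparison('family'.to_java_bytes,
        'qual'.to_java_bytes, Comparison::Operator::EQ, 'v1'.to_java_bytes))
    scanner = table.getScanner(scan)  # evaluated against the in-region index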

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/Formatter.rb
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/Formatter.rb?rev=897547&r1=897546&r2=897547&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/Formatter.rb (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/Formatter.rb Sat Jan  9 21:09:59 2010
@@ -95,8 +95,7 @@
       if str.instance_of? Fixnum
           return
       end
-      # Remove double-quotes added by 'dump'.
-      return str.dump[1..-2]
+      return str
     end
 
     def output(width, str)
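
This hunk is the Formatter side of HBASE-2035: cell strings are no longer
round-tripped through Ruby's String#dump; binary bytes are instead escaped
once, at render time, by Bytes.toStringBinary in the HBase.rb hunks below.
Roughly (the exact escaping shown is an assumption):

    # Printable ASCII passes through; other bytes become \xNN hex escapes.
    Bytes::toStringBinary("key\x03\xcd".to_java_bytes)  # => "key\x03\xCD" (assumed)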

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/HBase.rb
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/HBase.rb?rev=897547&r1=897546&r2=897547&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/HBase.rb (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/HBase.rb Sat Jan  9 21:09:59 2010
@@ -20,9 +20,6 @@
 import org.apache.hadoop.hbase.client.Delete
 import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter
 import org.apache.hadoop.hbase.HConstants
-import org.apache.hadoop.hbase.io.BatchUpdate
-import org.apache.hadoop.hbase.io.RowResult
-import org.apache.hadoop.hbase.io.Cell
 import org.apache.hadoop.hbase.io.hfile.Compression
 import org.apache.hadoop.hbase.HBaseConfiguration
 import org.apache.hadoop.hbase.HColumnDescriptor
@@ -204,7 +201,7 @@
       htd = HTableDescriptor.new(tableName)
       for arg in args
         if arg.instance_of? String
-          htd.addFamily(HColumnDescriptor.new(makeColumnName(arg)))
+          htd.addFamily(HColumnDescriptor.new(arg))
         else
           raise TypeError.new(arg.class.to_s + " of " + arg.to_s + " is not of Hash type") \
             unless arg.instance_of? Hash
@@ -223,15 +220,20 @@
       htd = @admin.getTableDescriptor(tableName.to_java_bytes)
       method = args.delete(METHOD)
       if method == "delete"
-        @admin.deleteColumn(tableName, makeColumnName(args[NAME]))
+        @admin.deleteColumn(tableName, args[NAME])
       elsif method == "table_att"
-        args[MAX_FILESIZE]? htd.setMaxFileSize(JLong.valueOf(args[MAX_FILESIZE])) :  
-          htd.setMaxFileSize(HTableDescriptor::DEFAULT_MAX_FILESIZE);
-        args[READONLY]? htd.setReadOnly(JBoolean.valueOf(args[READONLY])) : 
-          htd.setReadOnly(HTableDescriptor::DEFAULT_READONLY);
-        args[MEMSTORE_FLUSHSIZE]? 
-          htd.setMemStoreFlushSize(JLong.valueOf(args[MEMSTORE_FLUSHSIZE])) :
-          htd.setMemStoreFlushSize(HTableDescriptor::DEFAULT_MEMSTORE_FLUSH_SIZE);
+        if args[MAX_FILESIZE]
+          htd.setMaxFileSize(JLong.valueOf(args[MAX_FILESIZE])) 
+        end
+        if args[READONLY] 
+          htd.setReadOnly(JBoolean.valueOf(args[READONLY])) 
+        end  
+        if args[MEMSTORE_FLUSHSIZE]
+          htd.setMemStoreFlushSize(JLong.valueOf(args[MEMSTORE_FLUSHSIZE]))
+        end
+        if args[DEFERRED_LOG_FLUSH]
+          htd.setDeferredLogFlush(JBoolean.valueOf(args[DEFERRED_LOG_FLUSH]))
+        end
         @admin.modifyTable(tableName.to_java_bytes, htd)
       else
         descriptor = hcd(args) 
@@ -255,18 +257,6 @@
       @formatter.footer(now)
     end
 
-    # Make a legal column  name of the passed String
-    # Check string ends in colon. If not, add it.
-    def makeColumnName(arg)
-      index = arg.index(':')
-      if not index
-        # Add a colon.  If already a colon, its in the right place,
-        # or an exception will come up out of the addFamily
-        arg << ':'
-      end
-      arg
-    end
-
     def shutdown()
       @admin.shutdown()
     end
@@ -326,7 +316,6 @@
       name = arg[NAME]
       raise ArgumentError.new("Column family " + arg + " must have a name") \
         unless name
-      name = makeColumnName(name)
       # TODO: What encoding are Strings in jruby?
       return HColumnDescriptor.new(name.to_java_bytes,
         # JRuby uses longs for ints. Need to convert.  Also constants are String 
@@ -404,6 +393,7 @@
         timestamp = args["TIMESTAMP"] || nil
         columns = args["COLUMNS"] || getAllColumns()
         cache = args["CACHE_BLOCKS"] || true
+        versions = args["VERSIONS"] || 1
         
         if columns.class == String
           columns = [columns]
@@ -425,6 +415,7 @@
           scan.setTimeStamp(timestamp)
         end
         scan.setCacheBlocks(cache)
+        scan.setMaxVersions(versions) if versions > 1
       else
         scan = Scan.new()
       end
@@ -433,14 +424,16 @@
       @formatter.header(["ROW", "COLUMN+CELL"])
       i = s.iterator()
       while i.hasNext()
-        r = i.next().getRowResult()
-        row = String.from_java_bytes r.getRow()
+        r = i.next()
+        row = Bytes::toStringBinary(r.getRow())
         if limit != -1 and count >= limit
           break
         end
-        for k, v in r
-          column = String.from_java_bytes k
-          cell = toString(column, v, maxlength)
+        for kv in r.list
+          family = String.from_java_bytes kv.getFamily()
+          qualifier = Bytes::toStringBinary(kv.getQualifier())
+          column = family + ':' + qualifier
+          cell = toString(column, kv, maxlength)
           @formatter.row([row, "column=%s, %s" % [column, cell]])
         end
         count += 1
@@ -450,14 +443,19 @@
 
     def put(row, column, value, timestamp = nil)
       now = Time.now 
-      bu = nil
+      p = nil
       if timestamp
-        bu = BatchUpdate.new(row, timestamp)
+        p = Put.new(row.to_java_bytes, timestamp)
+      else
+        p = Put.new(row.to_java_bytes)
+      end
+      split = KeyValue.parseColumn(column.to_java_bytes)
+      if split.length > 1
+        p.add(split[0], split[1], value.to_java_bytes)
       else
-        bu = BatchUpdate.new(row)
+        p.add(split[0], nil, value.to_java_bytes)
       end
-      bu.put(column, value.to_java_bytes)
-      @table.commit(bu)
+      @table.put(p)
       @formatter.header()
       @formatter.footer(now)
     end
@@ -484,20 +482,19 @@
         Bytes.equals(tn, HConstants::ROOT_TABLE_NAME)
     end
 
-    # Make a String of the passed cell.
+    # Make a String of the passed kv 
     # Intercept cells whose format we know such as the info:regioninfo in .META.
-    def toString(column, cell, maxlength)
+    def toString(column, kv, maxlength)
       if isMetaTable()
         if column == 'info:regioninfo'
-          hri = Writables.getHRegionInfoOrNull(cell.getValue())
-          return "timestamp=%d, value=%s" % [cell.getTimestamp(), hri.toString()]
+          hri = Writables.getHRegionInfoOrNull(kv.getValue())
+          return "timestamp=%d, value=%s" % [kv.getTimestamp(), hri.toString()]
         elsif column == 'info:serverstartcode'
-          return "timestamp=%d, value=%s" % [cell.getTimestamp(), \
-            Bytes.toLong(cell.getValue())]
+          return "timestamp=%d, value=%s" % [kv.getTimestamp(), \
+            Bytes.toLong(kv.getValue())]
         end
       end
-      cell.toString()
-      val = cell.toString()
+      val = "timestamp=" + kv.getTimestamp().to_s + ", value=" + Bytes::toStringBinary(kv.getValue())
       maxlength != -1 ? val[0, maxlength] : val    
     end
   
@@ -506,7 +503,7 @@
       now = Time.now 
       result = nil
       if args == nil or args.length == 0 or (args.length == 1 and args[MAXLENGTH] != nil)
-        result = @table.getRow(row.to_java_bytes)
+        get = Get.new(row.to_java_bytes)
       else
         # Its a hash.
         columns = args[COLUMN] 
@@ -520,42 +517,47 @@
           if not ts
             raise ArgumentError.new("Failed parse of " + args + ", " + args.class)
           end
-          result = @table.getRow(row.to_java_bytes, ts)
+          get = Get.new(row.to_java_bytes, ts)
         else
+          get = Get.new(row.to_java_bytes)
           # Columns are non-nil
           if columns.class == String
             # Single column
-            result = @table.get(row, columns,
-              args[TIMESTAMP]? args[TIMESTAMP]: HConstants::LATEST_TIMESTAMP,
-              args[VERSIONS]? args[VERSIONS]: 1)
+            split = KeyValue.parseColumn(columns.to_java_bytes)
+            if (split.length > 1) 
+              get.addColumn(split[0], split[1])
+            else
+              get.addFamily(split[0])
+            end
           elsif columns.class == Array
-            result = @table.getRow(row, columns.to_java(:string),
-              args[TIMESTAMP]? args[TIMESTAMP]: HConstants::LATEST_TIMESTAMP)
+            for column in columns
+              split = KeyValue.parseColumn(column.to_java_bytes)
+              if (split.length > 1)
+                get.addColumn(split[0], split[1])
+              else
+                get.addFamily(split[0])
+              end
+            end
           else
             raise ArgumentError.new("Failed parse column argument type " +
               args + ", " + args.class)
           end
+          get.setMaxVersions(args[VERSIONS] ? args[VERSIONS] : 1)
+          if args[TIMESTAMP] 
+            get.setTimeStamp(args[TIMESTAMP])
+          end
         end
       end
+      result = @table.get(get)
       # Print out results.  Result can be Cell or RowResult.
       maxlength = args[MAXLENGTH] || -1
-      h = nil
-      if result.instance_of? RowResult
-        h = String.from_java_bytes result.getRow()
-        @formatter.header(["COLUMN", "CELL"])
-        if result
-          for k, v in result
-            column = String.from_java_bytes k
-            @formatter.row([column, toString(column, v, maxlength)])
-          end
-        end
-      else
-        # Presume Cells
-        @formatter.header()
-        if result 
-          for c in result
-            @formatter.row([toString(nil, c, maxlength)])
-          end
+      @formatter.header(["COLUMN", "CELL"])
+      if !result.isEmpty()
+        for kv in result.list()
+          family = String.from_java_bytes kv.getFamily()
+          qualifier = Bytes::toStringBinary(kv.getQualifier())
+          column = family + ':' + qualifier
+          @formatter.row([column, toString(column, kv, maxlength)])
         end
       end
       @formatter.footer(now)
@@ -612,26 +614,26 @@
     for i in 1..10
       table.put('x%d' % i, 'x:%d' % i, 'x%d' % i)
     end
-    table.get('x1', {COLUMN => 'x:1'})
+    table.get('x1', {COLUMNS => 'x:1'})
     if formatter.rowCount() != 1
       raise IOError.new("Failed first put")
     end
-    table.scan(['x:'])
+    table.scan({COLUMNS => ['x:']})
     if formatter.rowCount() != 10
       raise IOError.new("Failed scan of expected 10 rows")
     end
     # Verify that limit works.
-    table.scan(['x:'], {LIMIT => 3})
+    table.scan({COLUMNS => ['x:'], LIMIT => 4})
     if formatter.rowCount() != 3
       raise IOError.new("Failed scan of expected 3 rows")
     end
     # Should only be two rows if we start at 8 (Row x10 sorts beside x1).
-    table.scan(['x:'], {STARTROW => 'x8', LIMIT => 3})
+    table.scan({COLUMNS => ['x:'], STARTROW => 'x8', LIMIT => 3})
     if formatter.rowCount() != 2
       raise IOError.new("Failed scan of expected 2 rows")
     end
     # Scan between two rows
-    table.scan(['x:'], {STARTROW => 'x5', ENDROW => 'x8'})
+    table.scan({COLUMNS => ['x:'], STARTROW => 'x5', ENDROW => 'x8'})
     if formatter.rowCount() != 3
       raise IOError.new("Failed endrow test")
     end
@@ -643,9 +645,9 @@
     end
     # Verify that delete works
     table.delete('x1', 'x:1');
-    table.scan(['x:1'])
+    table.scan({COLUMNS => ['x:1']})
     scan1 = formatter.rowCount()
-    table.scan(['x:'])
+    table.scan({COLUMNS => ['x:']})
     scan2 = formatter.rowCount()
     if scan1 != 0 or scan2 != 9
       raise IOError.new("Failed delete test")
@@ -653,9 +655,9 @@
     # Verify that deleteall works
     table.put('x2', 'x:1', 'x:1')
     table.deleteall('x2')
-    table.scan(['x:2'])
+    table.scan({COLUMNS => ['x:2']})
     scan1 = formatter.rowCount()
-    table.scan(['x:'])
+    table.scan({COLUMNS => ['x:']})
     scan2 = formatter.rowCount()
     if scan1 != 0 or scan2 != 8
       raise IOError.new("Failed deleteall test")
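
Taken together, the HBase.rb changes move the shell off the removed
BatchUpdate/RowResult/Cell classes onto Put/Get/Scan/Result, and the rewritten
self-test above doubles as a reference for the new argument forms:

    table.put('row1', 'cf:q', 'value')                 # Put replaces BatchUpdate
    table.get('row1', {COLUMNS => 'cf:q'})             # COLUMNS hash replaces positional column
    table.scan({COLUMNS => ['cf:'], STARTROW => 'row1', LIMIT => 3})
    table.delete('row1', 'cf:q')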

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/hbase
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/hbase?rev=897547&r1=897546&r2=897547&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/hbase (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/hbase Sat Jan  9 21:09:59 2010
@@ -72,22 +72,6 @@
 COMMAND=$1
 shift
 
-# Source the hbase-env.sh.  Will have JAVA_HOME defined.
-if [ -f "${HBASE_CONF_DIR}/hbase-env.sh" ]; then
-  . "${HBASE_CONF_DIR}/hbase-env.sh"
-fi
-
-# some Java parameters
-if [ "$JAVA_HOME" != "" ]; then
-  #echo "run java in $JAVA_HOME"
-  JAVA_HOME=$JAVA_HOME
-fi
-  
-if [ "$JAVA_HOME" = "" ]; then
-  echo "Error: JAVA_HOME is not set."
-  exit 1
-fi
-
 JAVA=$JAVA_HOME/bin/java
 JAVA_HEAP_MAX=-Xmx1000m 
 

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/hbase-config.sh
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/hbase-config.sh?rev=897547&r1=897546&r2=897547&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/hbase-config.sh (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/hbase-config.sh Sat Jan  9 21:09:59 2010
@@ -44,7 +44,9 @@
 this="$bin/$script"
 
 # the root of the hbase installation
-export HBASE_HOME=`dirname "$this"`/..
+if [ -z "$HBASE_HOME" ]; then
+  export HBASE_HOME=`dirname "$this"`/..
+fi
 
 #check to see if the conf dir or hbase home are given as an optional arguments
 while [ $# -gt 1 ]
@@ -71,3 +73,38 @@
 HBASE_CONF_DIR="${HBASE_CONF_DIR:-$HBASE_HOME/conf}"
 # List of hbase regions servers.
 HBASE_REGIONSERVERS="${HBASE_REGIONSERVERS:-$HBASE_CONF_DIR/regionservers}"
+
+# Source the hbase-env.sh.  Will have JAVA_HOME defined.
+if [ -f "${HBASE_CONF_DIR}/hbase-env.sh" ]; then
+  . "${HBASE_CONF_DIR}/hbase-env.sh"
+fi
+
+if [ -z "$JAVA_HOME" ]; then
+  for candidate in \
+    /usr/lib/jvm/java-6-sun \
+    /usr/lib/j2sdk1.6-sun \
+    /usr/java/jdk1.6* \
+    /usr/java/jre1.6* \
+    /Library/Java/Home ; do
+    if [ -e $candidate/bin/java ]; then
+      export JAVA_HOME=$candidate
+      break
+    fi
+  done
+  # if we didn't set it
+  if [ -z "$JAVA_HOME" ]; then
+    cat 1>&2 <<EOF
++======================================================================+
+|      Error: JAVA_HOME is not set and Java could not be found         |
++----------------------------------------------------------------------+
+| Please download the latest Sun JDK from the Sun Java web site        |
+|       > http://java.sun.com/javase/downloads/ <                      |
+|                                                                      |
+| HBase requires Java 1.6 or later.                                    |
+| NOTE: This script will find Sun Java whether you install using the   |
+|       binary or the RPM based installer.                             |
++======================================================================+
+EOF
+    exit 1
+  fi
+fi

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/hbase-daemon.sh
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/hbase-daemon.sh?rev=897547&r1=897546&r2=897547&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/hbase-daemon.sh (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/hbase-daemon.sh Sat Jan  9 21:09:59 2010
@@ -71,10 +71,6 @@
     fi
 }
 
-if [ -f "${HBASE_CONF_DIR}/hbase-env.sh" ]; then
-  . "${HBASE_CONF_DIR}/hbase-env.sh"
-fi
-
 # get log directory
 if [ "$HBASE_LOG_DIR" = "" ]; then
   export HBASE_LOG_DIR="$HBASE_HOME/logs"

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/hirb.rb
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/hirb.rb?rev=897547&r1=897546&r2=897547&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/hirb.rb (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/hirb.rb Sat Jan  9 21:09:59 2010
@@ -87,7 +87,7 @@
 # Setup the HBase module.  Create a configuration.
 # Turn off retries in hbase and ipc.  Human doesn't want to wait on N retries.
 @configuration = org.apache.hadoop.hbase.HBaseConfiguration.new()
-@configuration.setInt("hbase.client.retries.number", 5)
+@configuration.setInt("hbase.client.retries.number", 7)
 @configuration.setInt("ipc.client.connect.max.retries", 3)
 
 # Do lazy create of admin because if we are pointed at bad master, it will hang
@@ -298,8 +298,7 @@
 'Object.constants' to see a (messy) list of all constants in the environment.
 
 In case you are using binary keys or values and need to enter them into the 
-shell then use double-quotes to make use of hexadecimal or octal notations, 
-for example:
+shell then use double-quotes to make use of hexadecimal, for example:
 
   hbase> get 't1', "key\\x03\\x3f\\xcd"
   hbase> get 't1', "key\\003\\023\\011"

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/regionservers.sh
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/regionservers.sh?rev=897547&r1=897546&r2=897547&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/regionservers.sh (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/regionservers.sh Sat Jan  9 21:09:59 2010
@@ -51,10 +51,6 @@
 # hbase-env.sh. Save it here.
 HOSTLIST=$HBASE_REGIONSERVERS
 
-if [ -f "${HBASE_CONF_DIR}/hbase-env.sh" ]; then
-  . "${HBASE_CONF_DIR}/hbase-env.sh"
-fi
-
 if [ "$HOSTLIST" = "" ]; then
   if [ "$HBASE_REGIONSERVERS" = "" ]; then
     export HOSTLIST="${HBASE_CONF_DIR}/regionservers"

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/zookeepers.sh
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/zookeepers.sh?rev=897547&r1=897546&r2=897547&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/zookeepers.sh (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/bin/zookeepers.sh Sat Jan  9 21:09:59 2010
@@ -43,10 +43,6 @@
 
 . "$bin"/hbase-config.sh
 
-if [ -f "${HBASE_CONF_DIR}/hbase-env.sh" ]; then
-  . "${HBASE_CONF_DIR}/hbase-env.sh"
-fi
-
 if [ "$HBASE_MANAGES_ZK" = "" ]; then
   HBASE_MANAGES_ZK=true
 fi

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/build.xml?rev=897547&r1=897546&r2=897547&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/build.xml (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/build.xml Sat Jan  9 21:09:59 2010
@@ -18,7 +18,7 @@
 -->
 
 <project name="hbase" default="jar">
-  <property name="version" value="0.20.2-0.18.3"/>
+  <property name="version" value="0.20.3-0.18.3"/>
   <property name="Name" value="HBase"/>
   <property name="final.name" value="hbase-${version}"/>
   <property name="year" value="2009"/>

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/conf/hbase-env.sh
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/conf/hbase-env.sh?rev=897547&r1=897546&r2=897547&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/conf/hbase-env.sh (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/conf/hbase-env.sh Sat Jan  9 21:09:59 2010
@@ -37,7 +37,16 @@
 export HBASE_OPTS="-XX:+HeapDumpOnOutOfMemoryError -XX:+UseConcMarkSweepGC -XX:+CMSIncrementalMode"
 
 # Uncomment below to enable java garbage collection logging.
-#export HBASE_OPTS="$HBASE_OPTS -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -Xloggc:$HBASE_HOME/logs/gc-hbase.log"
+# export HBASE_OPTS="$HBASE_OPTS -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -Xloggc:$HBASE_HOME/logs/gc-hbase.log"
+
+# Uncomment and adjust/create "jmxremote.*" to enable JMX exporting
+# export HBASE_JMX_BASE="-Dcom.sun.management.jmxremote.ssl=false"
+# export HBASE_JMX_BASE="$HBASE_JMX_BASE -Dcom.sun.management.jmxremote.password.file=$HBASE_HOME/conf/jmxremote.password"
+# export HBASE_JMX_BASE="$HBASE_JMX_BASE -Dcom.sun.management.jmxremote.access.file=$HBASE_HOME/conf/jmxremote.access"
+# export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS $HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10101"
+# export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS $HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10102"
+# export HBASE_THRIFT_OPTS="$HBASE_THRIFT_OPTS $HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10103"
+# export HBASE_ZOOKEEPER_OPTS="$HBASE_ZOOKEEPER_OPTS $HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10104"
 
 # File naming hosts on which HRegionServers will run.  $HBASE_HOME/conf/regionservers by default.
 # export HBASE_REGIONSERVERS=${HBASE_HOME}/conf/regionservers

Added: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/lib/zookeeper-3.2.2.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/lib/zookeeper-3.2.2.jar?rev=897547&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/lib/zookeeper-3.2.2.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/build-contrib.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/build-contrib.xml?rev=897547&r1=897546&r2=897547&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/build-contrib.xml (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/build-contrib.xml Sat Jan  9 21:09:59 2010
@@ -21,7 +21,7 @@
 
 <project name="hbasebuildcontrib">
 
-  <property name="version" value="0.20.0-0.18.3"/>
+  <property name="version" value="0.20.3-0.18.3"/>
   <property name="name" value="${ant.project.name}"/>
   <property name="root" value="${basedir}"/>
 
@@ -257,6 +257,7 @@
       errorProperty="tests.failed" failureProperty="tests.failed"
       timeout="${test.timeout}">
       
+      <jvmarg value="-ea"/>
       <sysproperty key="test.build.data" value="${build.test}/data"/>
       <sysproperty key="build.test" value="${build.test}"/>
       <sysproperty key="contrib.name" value="${name}"/>

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/hbase-ec2-env.sh
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/hbase-ec2-env.sh?rev=897547&r1=897546&r2=897547&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/hbase-ec2-env.sh (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/hbase-ec2-env.sh Sat Jan  9 21:09:59 2010
@@ -34,16 +34,15 @@
 EC2_ROOT_SSH_KEY=
 
 # The version of HBase to use.
-HBASE_VERSION=0.20.2-0.18.3
+HBASE_VERSION=@HBASE_VERSION@
 
 # The version of Hadoop to use.
-HADOOP_VERSION=0.18.3
+HADOOP_VERSION=0.20.1
 
 # The Amazon S3 bucket where the HBase AMI is stored.
 # Change this value only if you are creating your own (private) AMI
 # so you can store it in a bucket you own.
-#S3_BUCKET=hbase-images
-S3_BUCKET=iridiant-bundles
+S3_BUCKET=apache-hbase-images
 
 # Enable public access web interfaces
 ENABLE_WEB_PORTS=false

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/hbase-ec2-init-remote.sh
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/hbase-ec2-init-remote.sh?rev=897547&r1=897546&r2=897547&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/hbase-ec2-init-remote.sh (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/hbase-ec2-init-remote.sh Sat Jan  9 21:09:59 2010
@@ -1,9 +1,7 @@
 #!/usr/bin/env bash
 
-###############################################################################
 # Script that is run on each EC2 instance on boot. It is passed in the EC2 user
 # data, so should not exceed 16K in size.
-###############################################################################
 
 MASTER_HOST="%MASTER_HOST%"
 ZOOKEEPER_QUORUM="%ZOOKEEPER_QUORUM%"
@@ -18,9 +16,74 @@
 HBASE_HOME=`ls -d /usr/local/hbase-*`
 HBASE_VERSION=`echo $HBASE_HOME | cut -d '-' -f 2`
 
+export USER="root"
+
+# up file-max
+sysctl -w fs.file-max=32768
+
+# up ulimits
+echo "root soft nofile 32768" >> /etc/security/limits.conf
+echo "root hard nofile 32768" >> /etc/security/limits.conf
+
+# up epoll limits; ok if this fails, only valid for kernels 2.6.27+
+sysctl -w fs.epoll.max_user_instances=32768 > /dev/null 2>&1
+
+[ ! -f /etc/hosts ] &&  echo "127.0.0.1 localhost" > /etc/hosts
+
+# Extra packages
+
+if [ "$EXTRA_PACKAGES" != "" ] ; then
+  # format should be <repo-descriptor-URL> <package1> ... <packageN>
+  pkg=( $EXTRA_PACKAGES )
+  wget -nv -O /etc/yum.repos.d/user.repo ${pkg[0]}
+  yum -y update yum
+  yum -y install ${pkg[@]:1}
+fi
+
+# Ganglia
+
+if [ "$IS_MASTER" = "true" ]; then
+  sed -i -e "s|\( *mcast_join *=.*\)|#\1|" \
+         -e "s|\( *bind *=.*\)|#\1|" \
+         -e "s|\( *mute *=.*\)|  mute = yes|" \
+         -e "s|\( *location *=.*\)|  location = \"master-node\"|" \
+         /etc/gmond.conf
+  mkdir -p /mnt/ganglia/rrds
+  chown -R ganglia:ganglia /mnt/ganglia/rrds
+  rm -rf /var/lib/ganglia; cd /var/lib; ln -s /mnt/ganglia ganglia; cd
+  service gmond start
+  service gmetad start
+  apachectl start
+else
+  sed -i -e "s|\( *mcast_join *=.*\)|#\1|" \
+         -e "s|\( *bind *=.*\)|#\1|" \
+         -e "s|\(udp_send_channel {\)|\1\n  host=$MASTER_HOST|" \
+         /etc/gmond.conf
+  service gmond start
+fi
+
+# Probe for instance volumes
+
+# /dev/sdb as /mnt is always set up by base image
+DFS_NAME_DIR="/mnt/hadoop/dfs/name"
+DFS_DATA_DIR="/mnt/hadoop/dfs/data"
+i=2
+for d in c d e f g h i j k l m n o p q r s t u v w x y z; do
+  m="/mnt${i}"
+  mkdir -p $m
+  mount /dev/sd${d} $m > /dev/null 2>&1
+  if [ $? -eq 0 ] ; then
+    if [ $i -lt 3 ] ; then # no more than two namedirs
+      DFS_NAME_DIR="${DFS_NAME_DIR},${m}/hadoop/dfs/name"
+    fi
+    DFS_DATA_DIR="${DFS_DATA_DIR},${m}/hadoop/dfs/data"
+    i=$(( i + 1 ))
+  fi
+done
+
 # Hadoop configuration
 
-cat > $HADOOP_HOME/conf/hadoop-site.xml <<EOF
+cat > $HADOOP_HOME/conf/core-site.xml <<EOF
 <?xml version="1.0"?>
 <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
 <configuration>
@@ -32,11 +95,31 @@
   <name>fs.default.name</name>
   <value>hdfs://$MASTER_HOST:8020</value>
 </property>
+</configuration>
+EOF
+cat > $HADOOP_HOME/conf/hdfs-site.xml <<EOF
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<configuration>
 <property>
   <name>fs.default.name</name>
   <value>hdfs://$MASTER_HOST:8020</value>
 </property>
 <property>
+  <name>dfs.name.dir</name>
+  <value>$DFS_NAME_DIR</value>
+</property>
+<property>
+  <name>dfs.data.dir</name>
+  <value>$DFS_DATA_DIR</value>
+</property>
+</configuration>
+EOF
+cat > $HADOOP_HOME/conf/mapred-site.xml <<EOF
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<configuration>
+<property>
   <name>mapred.job.tracker</name>
   <value>$MASTER_HOST:8021</value>
 </property>
@@ -54,12 +137,10 @@
 </property>
 </configuration>
 EOF
-
 # Update classpath to include HBase jars and config
 cat >> $HADOOP_HOME/conf/hadoop-env.sh <<EOF
 HADOOP_CLASSPATH="$HBASE_HOME/hbase-${HBASE_VERSION}.jar:$HBASE_HOME/lib/AgileJSON-2009-03-30.jar:$HBASE_HOME/lib/json.jar:$HBASE_HOME/lib/zookeeper-3.2.1.jar:$HBASE_HOME/conf"
 EOF
-
 # Configure Hadoop for Ganglia
 cat > $HADOOP_HOME/conf/hadoop-metrics.properties <<EOF
 dfs.class=org.apache.hadoop.metrics.ganglia.GangliaContext
@@ -121,13 +202,11 @@
 </property>
 </configuration>
 EOF
-
 # Override JVM options
 cat >> $HBASE_HOME/conf/hbase-env.sh <<EOF
-export HBASE_MASTER_OPTS="-XX:+UseConcMarkSweepGC -XX:+DoEscapeAnalysis -XX:+AggressiveOpts -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -Xloggc:/mnt/hbase/logs/hbase-master-gc.log"
-export HBASE_REGIONSERVER_OPTS="-XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=88 -XX:+DoEscapeAnalysis -XX:+AggressiveOpts -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -Xloggc:/mnt/hbase/logs/hbase-regionserver-gc.log"
+export HBASE_MASTER_OPTS="-Xmx1000m -XX:+UseConcMarkSweepGC -XX:+DoEscapeAnalysis -XX:+AggressiveOpts -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -Xloggc:/mnt/hbase/logs/hbase-master-gc.log"
+export HBASE_REGIONSERVER_OPTS="-Xmx2000m -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=88 -XX:NewSize=64m -XX:MaxNewSize=64m -XX:+DoEscapeAnalysis -XX:+AggressiveOpts -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -Xloggc:/mnt/hbase/logs/hbase-regionserver-gc.log"
 EOF
-
 # Configure HBase for Ganglia
 cat > $HBASE_HOME/conf/hadoop-metrics.properties <<EOF
 dfs.class=org.apache.hadoop.metrics.ganglia.GangliaContext
@@ -141,69 +220,18 @@
 jvm.servers=$MASTER_HOST:8649
 EOF
 
-# Start services
-
-# up open file descriptor limits
-echo "root soft nofile 32768" >> /etc/security/limits.conf
-echo "root hard nofile 32768" >> /etc/security/limits.conf
-
-# up epoll limits, only valid for kernels 2.6.27+
-sysctl -w fs.epoll.max_user_instances=32768 > /dev/null 2>&1
-
 mkdir -p /mnt/hadoop/logs /mnt/hbase/logs
 
-[ ! -f /etc/hosts ] &&  echo "127.0.0.1 localhost" > /etc/hosts
-
-export USER="root"
-
-if [ "$EXTRA_PACKAGES" != "" ] ; then
-  # format should be <repo-descriptor-URL> <package1> ... <packageN>
-    # this will only work with bash
-  pkg=( $EXTRA_PACKAGES )
-  wget -nv -O /etc/yum.repos.d/user.repo ${pkg[0]}
-  yum -y update yum
-  yum -y install ${pkg[@]:1}
-fi
-
 if [ "$IS_MASTER" = "true" ]; then
-  # MASTER
-  # Prep Ganglia
-  sed -i -e "s|\( *mcast_join *=.*\)|#\1|" \
-         -e "s|\( *bind *=.*\)|#\1|" \
-         -e "s|\( *mute *=.*\)|  mute = yes|" \
-         -e "s|\( *location *=.*\)|  location = \"master-node\"|" \
-         /etc/gmond.conf
-  mkdir -p /mnt/ganglia/rrds
-  chown -R ganglia:ganglia /mnt/ganglia/rrds
-  rm -rf /var/lib/ganglia; cd /var/lib; ln -s /mnt/ganglia ganglia; cd
-  service gmond start
-  service gmetad start
-  apachectl start
-
   # only format on first boot
-  [ ! -e /mnt/hadoop/dfs ] && "$HADOOP_HOME"/bin/hadoop namenode -format
-
+  [ ! -e /mnt/hadoop/dfs/name ] && "$HADOOP_HOME"/bin/hadoop namenode -format
   "$HADOOP_HOME"/bin/hadoop-daemon.sh start namenode
-  "$HADOOP_HOME"/bin/hadoop-daemon.sh start datanode
   "$HADOOP_HOME"/bin/hadoop-daemon.sh start jobtracker
-  sleep 10
   "$HBASE_HOME"/bin/hbase-daemon.sh start master
-
 else
-
-  # SLAVE
-
-  # Prep Ganglia
-  sed -i -e "s|\( *mcast_join *=.*\)|#\1|" \
-         -e "s|\( *bind *=.*\)|#\1|" \
-         -e "s|\(udp_send_channel {\)|\1\n  host=$MASTER_HOST|" \
-         /etc/gmond.conf
-  service gmond start
   "$HADOOP_HOME"/bin/hadoop-daemon.sh start datanode
   "$HBASE_HOME"/bin/hbase-daemon.sh start regionserver
   "$HADOOP_HOME"/bin/hadoop-daemon.sh start tasktracker
-
 fi
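The master now formats the namenode only when /mnt/hadoop/dfs/name is absent and no longer runs a local datanode or tasktracker; those daemons start only on slaves. A minimal smoke test sketch, assuming the JDK's jps is on the PATH:

    # hypothetical smoke test once the instances are up
    jps   # master: expect NameNode, JobTracker, HMaster
          # slave:  expect DataNode, TaskTracker, HRegionServer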
 
-# Run this script on next boot
 rm -f /var/ec2/ec2-run-user-data.*

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/image/create-hbase-image-remote
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/image/create-hbase-image-remote?rev=897547&r1=897546&r2=897547&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/image/create-hbase-image-remote (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/image/create-hbase-image-remote Sat Jan  9 21:09:59 2010
@@ -37,7 +37,7 @@
 # Install Java
 echo "Downloading and installing java binary."
 cd /usr/local
-wget -nv -O java.bin http://iridiant.s3.amazonaws.com/jdk/jdk-${JAVA_VERSION}-linux-${arch}.bin
+wget -nv -O java.bin http://hbase.s3.amazonaws.com/jdk/jdk-${JAVA_VERSION}-linux-${arch}.bin
 sh java.bin
 rm -f java.bin
 
@@ -66,14 +66,14 @@
 # Install HBase
 echo "Installing HBase $HBASE_VERSION."
 cd /usr/local
-wget -nv http://iridiant.s3.amazonaws.com/hbase/hbase-$HBASE_VERSION.tar.gz
+wget -nv http://hbase.s3.amazonaws.com/hbase/hbase-$HBASE_VERSION.tar.gz
 tar xzf hbase-$HBASE_VERSION.tar.gz
 rm -f hbase-$HBASE_VERSION.tar.gz
 
 # Configure HBase
 sed -i \
   -e "s|# export JAVA_HOME=.*|export JAVA_HOME=/usr/local/jdk${JAVA_VERSION}|" \
-  -e 's|# export HBASE_OPTS=.*|export HBASE_OPTS="$HBASE_OPTS -server -XX:+HeapDumpOnOutOfMemoryError -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=90 -XX:NewSize=64m -XX:MaxNewSize=64m -XX:+DoEscapeAnalysis -XX:+AggressiveOpts"|' \
+  -e 's|# export HBASE_OPTS=.*|export HBASE_OPTS="$HBASE_OPTS -server -XX:+HeapDumpOnOutOfMemoryError"|' \
   -e 's|# export HBASE_LOG_DIR=.*|export HBASE_LOG_DIR=/mnt/hbase/logs|' \
   -e 's|# export HBASE_SLAVE_SLEEP=.*|export HBASE_SLAVE_SLEEP=1|' \
   /usr/local/hbase-$HBASE_VERSION/conf/hbase-env.sh
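The sed edits rewrite the commented defaults of hbase-env.sh in place. With a hypothetical JAVA_VERSION of 1.6.0_17, the file would end up containing:

    # illustrative result; the JAVA_VERSION value is an assumption
    export JAVA_HOME=/usr/local/jdk1.6.0_17
    export HBASE_OPTS="$HBASE_OPTS -server -XX:+HeapDumpOnOutOfMemoryError"
    export HBASE_LOG_DIR=/mnt/hbase/logs
    export HBASE_SLAVE_SLEEP=1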
@@ -83,7 +83,8 @@
 echo "/etc/init.d/ec2-run-user-data" >> /etc/rc.d/rc.local
 
 # Setup root user bash environment
 echo "export JAVA_HOME=/usr/local/jdk${JAVA_VERSION}" >> /root/.bash_profile
 echo "export HADOOP_HOME=/usr/local/hadoop-${HADOOP_VERSION}" >> /root/.bash_profile
 echo "export HBASE_HOME=/usr/local/hbase-${HBASE_VERSION}" >> /root/.bash_profile
 echo 'export PATH=$JAVA_HOME/bin:$HADOOP_HOME/bin:$HBASE_HOME/bin:$PATH' >> /root/.bash_profile
@@ -96,7 +97,7 @@
 
 # Install LZO
 echo "Installing LZO codec support"
-wget -nv -O /tmp/lzo-linux-${HADOOP_VERSION}.tar.gz http://iridiant.s3.amazonaws.com/hbase/lzo-linux-${HADOOP_VERSION}.tar.gz
+wget -nv -O /tmp/lzo-linux-${HADOOP_VERSION}.tar.gz http://hbase.s3.amazonaws.com/hbase/lzo-linux-${HADOOP_VERSION}.tar.gz
 cd /usr/local/hadoop-${HADOOP_VERSION} && tar xzf /tmp/lzo-linux-${HADOOP_VERSION}.tar.gz
 cd /usr/local/hbase-${HBASE_VERSION} && tar xzf /tmp/lzo-linux-${HADOOP_VERSION}.tar.gz
 rm -f /tmp/lzo-linux-${HADOOP_VERSION}.tar.gz
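A sanity check that the LZO natives landed can be as simple as the sketch below; the library layout inside the tarball is an assumption:

    # hypothetical post-install check
    find /usr/local/hadoop-${HADOOP_VERSION}/lib /usr/local/hbase-${HBASE_VERSION}/lib -name '*lzo*'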

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/launch-hbase-zookeeper
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/launch-hbase-zookeeper?rev=897547&r1=897546&r2=897547&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/launch-hbase-zookeeper (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/launch-hbase-zookeeper Sat Jan  9 21:09:59 2010
@@ -77,8 +77,9 @@
 
 # Start Zookeeper quorum
 
-sleep 5
+sleep 10
 echo "Initializing the ZooKeeper quorum ensemble."
+
 for host in $public_names ; do 
   echo "    $host"
   scp $SSH_OPTS "$bin"/hbase-ec2-init-zookeeper-remote.sh "root@${host}:/var/tmp"

Added: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/build.xml?rev=897547&view=auto
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/build.xml (added)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/build.xml Sat Jan  9 21:09:59 2010
@@ -0,0 +1,24 @@
+<project name="ec2" default="package" basedir=".">
+  <import file="../build-contrib.xml"/>
+
+  <target name="compile">
+    <copy todir="${build.dir}">
+      <fileset dir="${root}">
+        <exclude name="build.xml"/>
+      </fileset>
+    </copy>
+    <exec executable="sed">
+      <arg value="-i"/>
+      <arg value="-e"/> <arg value="s/@HBASE_VERSION@/${version}/g"/>
+      <arg value="${build.dir}/bin/hbase-ec2-env.sh"/>
+    </exec>
+  </target>
+
+  <target name="package" depends="compile">
+    <mkdir dir="${dist.dir}/contrib/${name}"/>
+    <copy todir="${dist.dir}/contrib/${name}">
+      <fileset dir="${build.dir}"/>
+    </copy>
+  </target>
+
+</project>
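The compile target copies the scripts and substitutes @HBASE_VERSION@ into hbase-ec2-env.sh. A sketch of a direct invocation, assuming ../build-contrib.xml supplies ${version}, ${build.dir}, and ${dist.dir}:

    # hypothetical invocation from the HBase source root
    ant -f src/contrib/ec2/build.xml package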

Added: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/README.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/README.txt?rev=897547&view=auto
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/README.txt (added)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/README.txt Sat Jan  9 21:09:59 2010
@@ -0,0 +1,3 @@
+This contrib contains indexed hbase (IHBase).
+To use it, include hbase-X.X.X-indexed.jar in your CLASSPATH and follow
+the instructions in the javadoc under the respective packages: org.apache.hadoop.hbase.$foobar$.indexed.
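A minimal client-side sketch of that setup, keeping the README's version placeholder:

    # hypothetical client classpath setup, with $HBASE_HOME already exported
    export CLASSPATH=$HBASE_HOME/hbase-X.X.X-indexed.jar:$CLASSPATH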

Added: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/build-fmpp.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/build-fmpp.xml?rev=897547&view=auto
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/build-fmpp.xml (added)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/build-fmpp.xml Sat Jan  9 21:09:59 2010
@@ -0,0 +1,34 @@
+<?xml version="1.0"?>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<!--
+ Used to generate code from FMPP templates. To use, run:
+ # ant -f build-fmpp.xml -lib lib/fmpp-0.19.14
+-->
+<project name="indexed" default="generate">
+  
+  <taskdef name="fmpp" classname="fmpp.tools.AntTask" />
+
+  <target name="generate">
+    <fmpp
+      sourceRoot="src/fmpp/src" outputRoot="src"
+      data="types:csv(data/types.csv)"
+      />
+  </target>
+</project>
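Putting the header comment and the fmpp task together, regeneration looks like:

    # regenerate the templated sources, per the header comment
    cd src/contrib/indexed
    ant -f build-fmpp.xml -lib lib/fmpp-0.19.14
    # templates under src/fmpp/src are expanded into src/, driven by data/types.csv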

Added: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/build.xml?rev=897547&view=auto
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/build.xml (added)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/build.xml Sat Jan  9 21:09:59 2010
@@ -0,0 +1,27 @@
+<?xml version="1.0"?>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<!--
+Before you can run these subtargets directly, you need
+to run at the top level: ant deploy-contrib compile-core-test
+-->
+<project name="indexed" default="jar">
+  <import file="../build-contrib.xml"/>
+      
+</project>
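Per the comment, a plausible sequence from the source root is sketched below; invoking the contrib build file directly with -f is an assumption:

    ant deploy-contrib compile-core-test
    ant -f src/contrib/indexed/build.xml jar   # "jar" is the default target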

Added: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/commons-lang-2.4.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/commons-lang-2.4.jar?rev=897547&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/commons-lang-2.4.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/easymock-2.5.2.LICENSE.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/easymock-2.5.2.LICENSE.txt?rev=897547&view=auto
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/easymock-2.5.2.LICENSE.txt (added)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/easymock-2.5.2.LICENSE.txt Sat Jan  9 21:09:59 2010
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.

Added: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/easymock-2.5.2.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/easymock-2.5.2.jar?rev=897547&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/easymock-2.5.2.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/fmpp-0.19.14/bsh.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/fmpp-0.19.14/bsh.jar?rev=897547&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/fmpp-0.19.14/bsh.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/fmpp-0.19.14/fmpp.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/fmpp-0.19.14/fmpp.jar?rev=897547&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/fmpp-0.19.14/fmpp.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/fmpp-0.19.14/freemarker.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/fmpp-0.19.14/freemarker.jar?rev=897547&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/fmpp-0.19.14/freemarker.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/fmpp-0.19.14/oro.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/fmpp-0.19.14/oro.jar?rev=897547&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/fmpp-0.19.14/oro.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/fmpp-0.19.14/resolver.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/fmpp-0.19.14/resolver.jar?rev=897547&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/lib/fmpp-0.19.14/resolver.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/src/data/types.csv
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/src/data/types.csv?rev=897547&view=auto
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/src/data/types.csv (added)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/indexed/src/fmpp/src/data/types.csv Sat Jan  9 21:09:59 2010
@@ -0,0 +1,11 @@
+displayName;clazz;primitive;kind
+Byte;Byte;byte;integer
+Char;Character;char;integer
+Short;Short;short;integer
+Integer;Integer;int;integer
+Long;Long;long;integer
+Float;Float;float;floatingPoint
+Double;Double;double;floatingPoint
+ByteArray;byte[];byte[];integerArray
+CharArray;char[];char[];integerArray
+BigDecimal;BigDecimal;BigDecimal;comparable
\ No newline at end of file
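Each row specializes the FMPP templates for one type: displayName names the generated class, clazz the boxed Java type, primitive the storage type, and kind presumably selects the template branch. A hypothetical preview of the resulting class names (the <displayName>ArrayList naming is an assumption):

    # hypothetical preview of the type-specialized class names
    tail -n +2 src/fmpp/src/data/types.csv | cut -d';' -f1 | sed 's/$/ArrayList/'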