Posted to commits@directory.apache.org by se...@apache.org on 2010/03/19 17:59:01 UTC

svn commit: r925326 [1/4] - in /directory/sandbox/seelmann/hbase-partition-test: ./ src/ src/main/ src/test/ src/test/java/ src/test/java/org/ src/test/java/org/apache/ src/test/java/org/apache/directory/ src/test/java/org/apache/directory/server/ src/...

Author: seelmann
Date: Fri Mar 19 16:59:00 2010
New Revision: 925326

URL: http://svn.apache.org/viewvc?rev=925326&view=rev
Log:
moved tests into separate module

Added:
    directory/sandbox/seelmann/hbase-partition-test/pom.xml
    directory/sandbox/seelmann/hbase-partition-test/src/
    directory/sandbox/seelmann/hbase-partition-test/src/main/
    directory/sandbox/seelmann/hbase-partition-test/src/test/
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseClusterTestCaseAdapter.java
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseDistributedRunner.java
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseEmbeddedRunner.java
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/JdbmRunner.java
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/eval/
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/eval/ScanFilterTest.java
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/index/
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/it/
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/it/AbstractHBasePartitionIT.java
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/it/HBasePartitionIT.java
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/it/HBasePartitionPerformanceIT.java
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/it/HBaseRunner.java
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/GetPerformanceEvaluation.java
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/LdifImportAndIndexIT.java
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/RemoteLdifImport.java
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/RemoteRunner.java
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/table/
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/table/AbstractHBaseTableTest.java
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/table/HBaseIndexTableTest.java
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/table/HBaseMasterTableTest.java
    directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/table/HBasePresenceIndexTableTest.java
    directory/sandbox/seelmann/hbase-partition-test/src/test/resources/
    directory/sandbox/seelmann/hbase-partition-test/src/test/resources/hbase-site.xml
    directory/sandbox/seelmann/hbase-partition-test/src/test/resources/log4j.properties
    directory/sandbox/seelmann/hbase-partition-test/src/test/resources/mapred-site.xml
    directory/sandbox/seelmann/hbase-partition-test/src/test/resources/testdata-5.ldif
Modified:
    directory/sandbox/seelmann/hbase-partition-test/   (props changed)

Propchange: directory/sandbox/seelmann/hbase-partition-test/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Mar 19 16:59:00 2010
@@ -0,0 +1,5 @@
+.classpath
+.project
+target
+.settings
+

Added: directory/sandbox/seelmann/hbase-partition-test/pom.xml
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition-test/pom.xml?rev=925326&view=auto
==============================================================================
--- directory/sandbox/seelmann/hbase-partition-test/pom.xml (added)
+++ directory/sandbox/seelmann/hbase-partition-test/pom.xml Fri Mar 19 16:59:00 2010
@@ -0,0 +1,271 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+  <!--
+    Licensed to the Apache Software Foundation (ASF) under one or more
+    contributor license agreements. See the NOTICE file distributed with
+    this work for additional information regarding copyright ownership.
+    The ASF licenses this file to you under the Apache License, Version
+    2.0 (the "License"); you may not use this file except in compliance
+    with the License. You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0 Unless required by
+    applicable law or agreed to in writing, software distributed under
+    the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
+    OR CONDITIONS OF ANY KIND, either express or implied. See the
+    License for the specific language governing permissions and
+    limitations under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.directory.server</groupId>
+    <artifactId>apacheds-parent</artifactId>
+    <version>1.5.6-SNAPSHOT</version>
+  </parent>
+  <artifactId>apacheds-hbase-partition-test</artifactId>
+  <name>ApacheDS HBase Partition Test</name>
+  <packaging>jar</packaging>
+
+  <description>
+        Tests for HBase partition.
+  </description>
+
+  <dependencies>
+
+    <dependency>
+      <groupId>${pom.groupId}</groupId>
+      <artifactId>apacheds-hbase-partition</artifactId>
+      <version>${pom.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>commons-collections</groupId>
+      <artifactId>commons-collections</artifactId>
+      <version>3.2.1</version>
+    </dependency>
+    
+    <!-- Dependencies to ApacheDS, for partition implementation -->
+    <dependency>
+      <groupId>${pom.groupId}</groupId>
+      <artifactId>apacheds-core</artifactId>
+      <version>${pom.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>${pom.groupId}</groupId>
+      <artifactId>apacheds-xdbm-base</artifactId>
+      <version>${pom.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>${pom.groupId}</groupId>
+      <artifactId>apacheds-xdbm-search</artifactId>
+      <version>${pom.version}</version>
+    </dependency>
+
+    <!-- ApacheDS integration test framework -->
+    <dependency>
+      <groupId>org.apache.directory.server</groupId>
+      <artifactId>apacheds-server-integ</artifactId>
+      <version>${pom.version}</version>
+      <scope>test</scope>
+    </dependency>
+
+    <!-- HBase dependencies -->
+    <!-- 
+      Unfortunately HBase artifacts are not yet available in public maven repo.
+      So I created a private repo under p.a.o/~seelmann and use groupId 
+      "org.apache.directory.hbase" to avoid conflicts.
+    -->
+    <dependency>
+      <groupId>org.apache.directory.hbase</groupId>
+      <artifactId>hbase</artifactId>
+      <version>0.20.3-RC3</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.directory.hbase</groupId>
+      <artifactId>hadoop-core</artifactId>
+      <version>0.20.1-hdfs127</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
+      <version>1.0.4</version>
+    </dependency>
+    
+    <!-- HBase test dependencies -->
+    <!-- 
+      Unfortunately HBase artifacts are not yet available in public maven repo.
+      So I created a private repo under p.a.o/~seelmann and use groupId 
+      "org.apache.directory.hbase" to avoid conflicts.
+    -->
+    <dependency>
+      <groupId>org.apache.directory.hbase</groupId>
+      <artifactId>hadoop-test</artifactId>
+      <version>0.20.1</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.directory.hbase</groupId>
+      <artifactId>hbase-test</artifactId>
+      <version>0.20.3-RC3</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.directory.hbase</groupId>
+      <artifactId>zookeeper</artifactId>
+      <version>3.2.2</version>
+    </dependency>
+    <!-- HBase transitive test dependencies -->
+    <dependency>
+      <groupId>commons-lang</groupId>
+      <artifactId>commons-lang</artifactId>
+      <version>2.4</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-httpclient</groupId>
+      <artifactId>commons-httpclient</artifactId>
+      <version>3.0.1</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-cli</groupId>
+      <artifactId>commons-cli</artifactId>
+      <version>1.2</version>
+    </dependency>
+    <dependency>
+      <groupId>org.mortbay.jetty</groupId>
+      <artifactId>jetty</artifactId>
+      <version>6.1.14</version>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <plugins>
+      <!-- 
+      <plugin>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <includes>
+            <include>**/*IT.java</include>
+          </includes>
+        </configuration>
+      </plugin>
+       -->
+      <!-- Delete build and test directories that were created by the HBase tests -->
+      <plugin>
+        <artifactId>maven-clean-plugin</artifactId>
+        <configuration>
+          <filesets>
+            <fileset>
+              <directory>build</directory>
+            </fileset>            
+            <fileset>
+              <directory>test</directory>
+            </fileset>            
+          </filesets>
+        </configuration>
+      </plugin>
+      
+      <!-- Install the Hadoop and HBase artifacts; they are not available in a public Maven repo -->
+      <!-- 
+      <plugin>
+        <artifactId>maven-install-plugin</artifactId>
+        <version>2.3</version>
+        <executions>
+          <execution>
+            <id>install-hadoop-core</id>
+            <phase>validate</phase>
+            <goals>
+              <goal>install-file</goal>
+            </goals>
+            <configuration>
+              <file>lib/hadoop-0.20.1-hdfs127-core.jar</file>
+              <localRepositoryPath>repo</localRepositoryPath>
+              <groupId>org.apache.directory.hbase</groupId>
+              <artifactId>hadoop-core</artifactId>
+              <version>0.20.1-hdfs127</version>
+              <packaging>jar</packaging>
+            </configuration>
+          </execution>
+          <execution>
+            <id>install-hadoop-test</id>
+            <phase>validate</phase>
+            <goals>
+              <goal>install-file</goal>
+            </goals>
+            <configuration>
+              <file>lib/hadoop-0.20.1-test.jar</file>
+              <localRepositoryPath>repo</localRepositoryPath>
+              <groupId>org.apache.directory.hbase</groupId>
+              <artifactId>hadoop-test</artifactId>
+              <version>0.20.1</version>
+              <packaging>jar</packaging>
+            </configuration>
+          </execution>
+          <execution>
+            <id>install-hbase</id>
+            <phase>validate</phase>
+            <goals>
+              <goal>install-file</goal>
+            </goals>
+            <configuration>
+              <file>lib/hbase-0.20.3-RC3.jar</file>
+              <localRepositoryPath>repo</localRepositoryPath>
+              <groupId>org.apache.directory.hbase</groupId>
+              <artifactId>hbase</artifactId>
+              <version>0.20.3-RC3</version>
+              <packaging>jar</packaging>
+            </configuration>
+          </execution>
+          <execution>
+            <id>install-hbase-test</id>
+            <phase>validate</phase>
+            <goals>
+              <goal>install-file</goal>
+            </goals>
+            <configuration>
+              <file>lib/hbase-0.20.3-RC3-test.jar</file>
+              <localRepositoryPath>repo</localRepositoryPath>
+              <groupId>org.apache.directory.hbase</groupId>
+              <artifactId>hbase-test</artifactId>
+              <version>0.20.3-RC3</version>
+              <packaging>jar</packaging>
+            </configuration>
+          </execution>
+          <execution>
+            <id>install-zookeeper</id>
+            <phase>validate</phase>
+            <goals>
+              <goal>install-file</goal>
+            </goals>
+            <configuration>
+              <file>lib/zookeeper-3.2.2.jar</file>
+              <localRepositoryPath>repo</localRepositoryPath>
+              <groupId>org.apache.directory.hbase</groupId>
+              <artifactId>zookeeper</artifactId>
+              <version>3.2.2</version>
+              <packaging>jar</packaging>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+       -->
+    </plugins>
+  </build>
+
+  <repositories>
+    <!-- 
+      Unfortunately HBase artifacts are not yet available in public maven repo.
+      So I created a private repo under p.a.o/~seelmann and use groupId 
+      "org.apache.directory.hbase" to avoid conflicts.
+    -->  
+    <repository>
+        <id>hbase-partition-private-repository</id>
+        <url>http://people.apache.org/~seelmann/hbase-partition/repository</url>
+        <snapshots>
+          <enabled>false</enabled>
+        </snapshots>
+     </repository>
+  </repositories>
+
+</project>
+
+

Added: directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseClusterTestCaseAdapter.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseClusterTestCaseAdapter.java?rev=925326&view=auto
==============================================================================
--- directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseClusterTestCaseAdapter.java (added)
+++ directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseClusterTestCaseAdapter.java Fri Mar 19 16:59:00 2010
@@ -0,0 +1,122 @@
+/*
+ *   Licensed to the Apache Software Foundation (ASF) under one
+ *   or more contributor license agreements.  See the NOTICE file
+ *   distributed with this work for additional information
+ *   regarding copyright ownership.  The ASF licenses this file
+ *   to you under the Apache License, Version 2.0 (the
+ *   "License"); you may not use this file except in compliance
+ *   with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   Unless required by applicable law or agreed to in writing,
+ *   software distributed under the License is distributed on an
+ *   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *   KIND, either express or implied.  See the License for the
+ *   specific language governing permissions and limitations
+ *   under the License.
+ *
+ */
+package org.apache.directory.server.core.partition.hbase;
+
+
+import java.io.File;
+
+import org.apache.hadoop.hbase.HBaseClusterTestCase;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.mapred.MiniMRCluster;
+import org.junit.Ignore;
+
+
+/**
+ * Adapter for {@link HBaseClusterTestCase}. The setUp() method
+ * starts up a Mini DFS and HBase cluster, ready to be used for
+ * unit tests.
+ *
+ * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
+ * @version $Rev$, $Date$
+ */
+@Ignore
+public class HBaseClusterTestCaseAdapter extends HBaseClusterTestCase
+{
+
+    protected MiniMRCluster mrCluster;
+    
+    /**
+     * Instantiates a new HBaseClusterTestCaseAdapter.
+     * 
+     * @param clazz the class
+     * @param startDfs true to start a MiniDFS (distributed file system)
+     */
+    public HBaseClusterTestCaseAdapter( Class<?> clazz, boolean startDfs ) throws Exception
+    {
+        super( 1, startDfs );
+        setName( clazz.getName() );
+
+        // use target as test  directory base...
+        File testDir = new File( "target/data" );
+        // ...for zookeeper
+        conf.set( TEST_DIRECTORY_KEY, testDir.getAbsolutePath() );
+        // ...for hdfs
+        System.setProperty( "test.build.data", testDir.getAbsolutePath() );
+        // ...for map/reduce
+        System.setProperty("hadoop.log.dir", testDir.getAbsolutePath() + "/log");
+
+        // setup local file system if no DFS is used
+        if ( !startDfs )
+        {
+            String unitTestDir = getUnitTestdir( getName() ).toString();
+            String hbaseRootDirUrl = new File( unitTestDir, "hbase" ).toURI().toURL().toString();
+            conf.set( HConstants.HBASE_DIR, hbaseRootDirUrl );
+        }
+    }
+
+
+    @Override
+    public void setUp() throws Exception
+    {
+        // don't open the META table in setUp(); timeouts sometimes occur...
+        if ( !startDfs )
+        {
+            super.setOpenMetaTable( false );
+        }
+
+        super.setUp();
+        
+        if(startDfs)
+        {
+            // These are needed for the new and improved Map/Reduce framework
+            conf.set("mapred.output.dir", conf.get("hadoop.tmp.dir"));
+            mrCluster = new MiniMRCluster(2, fs.getUri().toString(), 1);
+        }
+
+        // opening the META table ensures that the cluster is running
+        //Thread.sleep( 10000 );
+        //new HTable(conf, HConstants.META_TABLE_NAME);
+    }
+
+
+    @Override
+    public void tearDown() throws Exception
+    {
+        super.tearDown();
+        
+        if(startDfs)
+        {
+            mrCluster.shutdown();
+        }
+    }
+
+
+    /**
+     * Gets the HBase configuration.
+     * 
+     * @return the HBase configuration
+     */
+    public HBaseConfiguration getHBaseConfigurtion()
+    {
+        return conf;
+    }
+
+}
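
For orientation, a minimal JUnit 4 sketch (not part of this commit) of how the adapter can be driven; it mirrors the lifecycle used in AbstractHBasePartitionIT further down. The class name AdapterSmokeTest and the listTables() check are illustrative assumptions, and constructing HBaseAdmin simply verifies that the master is reachable.

    package org.apache.directory.server.core.partition.hbase;

    import org.apache.hadoop.hbase.client.HBaseAdmin;
    import org.junit.AfterClass;
    import org.junit.BeforeClass;
    import org.junit.Test;

    public class AdapterSmokeTest
    {
        private static HBaseClusterTestCaseAdapter adapter;

        @BeforeClass
        public static void startCluster() throws Exception
        {
            // start a single-region-server mini cluster including a Mini DFS
            adapter = new HBaseClusterTestCaseAdapter( AdapterSmokeTest.class, true );
            adapter.setUp();
        }

        @AfterClass
        public static void stopCluster() throws Exception
        {
            adapter.tearDown();
        }

        @Test
        public void clusterIsUp() throws Exception
        {
            // creating the admin client connects to the master of the mini cluster
            HBaseAdmin admin = new HBaseAdmin( adapter.getHBaseConfigurtion() );
            System.out.println( "user tables: " + admin.listTables().length );
        }
    }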

Added: directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseDistributedRunner.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseDistributedRunner.java?rev=925326&view=auto
==============================================================================
--- directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseDistributedRunner.java (added)
+++ directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseDistributedRunner.java Fri Mar 19 16:59:00 2010
@@ -0,0 +1,104 @@
+/*
+ *   Licensed to the Apache Software Foundation (ASF) under one
+ *   or more contributor license agreements.  See the NOTICE file
+ *   distributed with this work for additional information
+ *   regarding copyright ownership.  The ASF licenses this file
+ *   to you under the Apache License, Version 2.0 (the
+ *   "License"); you may not use this file except in compliance
+ *   with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   Unless required by applicable law or agreed to in writing,
+ *   software distributed under the License is distributed on an
+ *   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *   KIND, either express or implied.  See the License for the
+ *   specific language governing permissions and limitations
+ *   under the License.
+ *
+ */
+package org.apache.directory.server.core.partition.hbase;
+
+import org.apache.directory.server.annotations.CreateLdapServer;
+import org.apache.directory.server.annotations.CreateTransport;
+import org.apache.directory.server.core.CoreSession;
+import org.apache.directory.server.core.annotations.CreateDS;
+import org.apache.directory.server.core.annotations.CreatePartition;
+import org.apache.directory.server.core.integ.AbstractLdapTestUnit;
+import org.apache.directory.server.core.integ.FrameworkRunner;
+import org.apache.directory.server.core.partition.hbase.it.AbstractHBasePartitionIT;
+import org.apache.directory.shared.ldap.name.DN;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+
+/**
+ * Starts up an ApacheDS LDAP server with an HBase partition.
+ * A running HBase instance is required and must be configured 
+ * in hbase-site.xml.  
+ * 
+ * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
+ * @version $Rev$, $Date$
+ */
+@RunWith(FrameworkRunner.class)
+@CreateDS(
+    name = "hbase", 
+    enableChangeLog = false,
+    partitions =
+    { 
+        @CreatePartition(
+            name = "hbase", 
+            suffix = "o=hbase", 
+            type = HBaseDistributedPartition.class
+        ),
+        @CreatePartition(
+            name = "sevenSeas", 
+            suffix = "o=sevenSeas", 
+            type = HBaseDistributedPartition.class
+        ) 
+    })
+@CreateLdapServer(transports =
+    { @CreateTransport(protocol = "LDAP", port = 10389, nbThreads=48) })
+@Ignore
+public class HBaseDistributedRunner extends AbstractLdapTestUnit
+{
+    protected CoreSession session;
+
+
+    @Before
+    public void initTestData() throws Exception
+    {
+        session = ldapServer.getDirectoryService().getAdminSession();
+
+        if ( !session.exists( new DN( "o=hbase" ) ) || !session.exists( new DN( "ou=test-ou,o=hbase" ) )
+            || !session.exists( new DN( "cn=test-person,ou=test-ou,o=hbase" ) ) )
+        {
+            AbstractHBasePartitionIT.createBasicTestData( ldapServer );
+        }
+        if ( !session.exists( new DN( "ou=test1000,o=hbase" ) ) )
+        {
+            AbstractHBasePartitionIT.createTestData( 1000, "000", ldapServer );
+        }
+        if ( !session.exists( new DN( "ou=test10000,o=hbase" ) ) )
+        {
+            AbstractHBasePartitionIT.createTestData( 10000, "0000", ldapServer );
+        }
+        if ( !session.exists( new DN( "ou=test100000,o=hbase" ) ) )
+        {
+            AbstractHBasePartitionIT.createTestData( 100000, "00000", ldapServer );
+        }
+        AbstractHBasePartitionIT.compactDatabase( new HBaseConfiguration() );
+    }
+
+
+    @Test
+    public void runServer() throws Exception
+    {
+        System.out.println( "ApacheDS started on port " + ldapServer.getPort() + ", press any key to shutdown..." );
+        System.in.read();
+    }
+
+}
\ No newline at end of file
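
As a quick way to poke at the server while runServer() is waiting, a JNDI client along these lines could be used (not part of this commit). The localhost URL with port 10389 comes from the @CreateTransport annotation; the default ApacheDS admin credentials (uid=admin,ou=system / secret) and the sample entry cn=test042 under ou=test1000,o=hbase are assumptions based on the test data created above.

    import java.util.Hashtable;

    import javax.naming.Context;
    import javax.naming.NamingEnumeration;
    import javax.naming.directory.InitialDirContext;
    import javax.naming.directory.SearchControls;
    import javax.naming.directory.SearchResult;

    public class HBaseRunnerSearchClient
    {
        public static void main( String[] args ) throws Exception
        {
            Hashtable<String, String> env = new Hashtable<String, String>();
            env.put( Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory" );
            env.put( Context.PROVIDER_URL, "ldap://localhost:10389" );
            env.put( Context.SECURITY_PRINCIPAL, "uid=admin,ou=system" );
            env.put( Context.SECURITY_CREDENTIALS, "secret" );

            InitialDirContext ctx = new InitialDirContext( env );

            // look up one of the generated entries below ou=test1000
            SearchControls controls = new SearchControls();
            controls.setSearchScope( SearchControls.SUBTREE_SCOPE );
            NamingEnumeration<SearchResult> results =
                ctx.search( "ou=test1000,o=hbase", "(cn=test042)", controls );
            while ( results.hasMore() )
            {
                System.out.println( results.next().getNameInNamespace() );
            }
            results.close();
            ctx.close();
        }
    }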

Added: directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseEmbeddedRunner.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseEmbeddedRunner.java?rev=925326&view=auto
==============================================================================
--- directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseEmbeddedRunner.java (added)
+++ directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseEmbeddedRunner.java Fri Mar 19 16:59:00 2010
@@ -0,0 +1,118 @@
+/*
+ *   Licensed to the Apache Software Foundation (ASF) under one
+ *   or more contributor license agreements.  See the NOTICE file
+ *   distributed with this work for additional information
+ *   regarding copyright ownership.  The ASF licenses this file
+ *   to you under the Apache License, Version 2.0 (the
+ *   "License"); you may not use this file except in compliance
+ *   with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   Unless required by applicable law or agreed to in writing,
+ *   software distributed under the License is distributed on an
+ *   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *   KIND, either express or implied.  See the License for the
+ *   specific language governing permissions and limitations
+ *   under the License.
+ *
+ */
+package org.apache.directory.server.core.partition.hbase;
+
+import org.apache.directory.server.annotations.CreateLdapServer;
+import org.apache.directory.server.annotations.CreateTransport;
+import org.apache.directory.server.core.CoreSession;
+import org.apache.directory.server.core.annotations.CreateDS;
+import org.apache.directory.server.core.annotations.CreateIndex;
+import org.apache.directory.server.core.annotations.CreatePartition;
+import org.apache.directory.server.core.integ.AbstractLdapTestUnit;
+import org.apache.directory.server.core.integ.FrameworkRunner;
+import org.apache.directory.server.core.partition.hbase.index.HBaseUserColumnIndex;
+import org.apache.directory.server.core.partition.hbase.index.HBaseUserRowIndex;
+import org.apache.directory.server.core.partition.hbase.it.AbstractHBasePartitionIT;
+import org.apache.directory.shared.ldap.name.DN;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+
+/**
+ * Starts up an ApacheDS LDAP server with two HBase partitions.
+ * The partitions start up an embedded HBase instance.
+ * 
+ * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
+ * @version $Rev$, $Date$
+ */
+@RunWith(FrameworkRunner.class)
+@CreateDS(
+    name = "hbase", 
+    enableChangeLog = false,
+    partitions =
+    { 
+        @CreatePartition(
+            name = "hbase", 
+            suffix = "o=hbase",
+            type = HBaseEmbeddedPartition.class,
+            cacheSize=1000,
+            indexes = {
+                @CreateIndex( attribute="cn", cacheSize=1000, type = HBaseUserColumnIndex.class ),
+                @CreateIndex( attribute="uid", cacheSize=1000, type = HBaseUserColumnIndex.class ),
+                
+                @CreateIndex( attribute="dc", cacheSize=10, type = HBaseUserRowIndex.class ),
+                @CreateIndex( attribute="o", cacheSize=10, type = HBaseUserRowIndex.class ),
+                @CreateIndex( attribute="ou", cacheSize=10, type = HBaseUserRowIndex.class ),
+
+                @CreateIndex( attribute="objectClass", cacheSize=100, type = HBaseUserRowIndex.class )
+            }
+        )
+        ,
+        @CreatePartition(
+            name = "sevenSeas", 
+            suffix = "o=sevenSeas", 
+            type = HBaseEmbeddedPartition.class
+        ) 
+    })
+@CreateLdapServer(transports =
+    { @CreateTransport(protocol = "LDAP", port = 10389, nbThreads=16) })
+@Ignore
+public class HBaseEmbeddedRunner extends AbstractLdapTestUnit
+{
+    protected CoreSession session;
+
+
+    @Before
+    public void initTestData() throws Exception
+    {
+        session = ldapServer.getDirectoryService().getAdminSession();
+
+        if ( !session.exists( new DN( "o=hbase" ) ) || !session.exists( new DN( "ou=test-ou,o=hbase" ) )
+            || !session.exists( new DN( "cn=test-person,ou=test-ou,o=hbase" ) ) )
+        {
+            AbstractHBasePartitionIT.createBasicTestData( ldapServer );
+        }
+        if ( !session.exists( new DN( "ou=test1000,o=hbase" ) ) )
+        {
+            AbstractHBasePartitionIT.createTestData( 1000, "000", ldapServer );
+        }
+//        if ( !session.exists( new DN( "ou=test10000,o=hbase" ) ) )
+//        {
+//            AbstractHBasePartitionIT.createTestData( 10000, "0000", ldapServer );
+//        }
+//        if ( !session.exists( new DN( "ou=test100000,o=hbase" ) ) )
+//        {
+//            AbstractHBasePartitionIT.createTestData( 100000, "00000", ldapServer );
+//        }
+        AbstractHBasePartitionIT.compactDatabase( new HBaseConfiguration() );
+    }
+
+
+    @Test
+    public void runServer() throws Exception
+    {
+        System.out.println( "ApacheDS started on port " + ldapServer.getPort() + ", press any key to shutdown..." );
+        System.in.read();
+    }
+
+}
\ No newline at end of file

Added: directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/JdbmRunner.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/JdbmRunner.java?rev=925326&view=auto
==============================================================================
--- directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/JdbmRunner.java (added)
+++ directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/JdbmRunner.java Fri Mar 19 16:59:00 2010
@@ -0,0 +1,128 @@
+/*
+ *   Licensed to the Apache Software Foundation (ASF) under one
+ *   or more contributor license agreements.  See the NOTICE file
+ *   distributed with this work for additional information
+ *   regarding copyright ownership.  The ASF licenses this file
+ *   to you under the Apache License, Version 2.0 (the
+ *   "License"); you may not use this file except in compliance
+ *   with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   Unless required by applicable law or agreed to in writing,
+ *   software distributed under the License is distributed on an
+ *   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *   KIND, either express or implied.  See the License for the
+ *   specific language governing permissions and limitations
+ *   under the License.
+ *
+ */
+package org.apache.directory.server.core.partition.hbase;
+
+import static org.apache.directory.server.integ.ServerIntegrationUtils.getWiredContext;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import javax.naming.NamingEnumeration;
+import javax.naming.directory.SearchControls;
+import javax.naming.directory.SearchResult;
+import javax.naming.ldap.LdapContext;
+
+import org.apache.directory.server.annotations.CreateLdapServer;
+import org.apache.directory.server.annotations.CreateTransport;
+import org.apache.directory.server.core.annotations.ApplyLdifFiles;
+import org.apache.directory.server.core.annotations.CreateDS;
+import org.apache.directory.server.core.annotations.CreateIndex;
+import org.apache.directory.server.core.annotations.CreatePartition;
+import org.apache.directory.server.core.integ.AbstractLdapTestUnit;
+import org.apache.directory.server.core.integ.FrameworkRunner;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+
+/**
+ * 
+ * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
+ * @version $Rev$, $Date$
+ */
+@RunWith(FrameworkRunner.class)
+@CreateDS(
+    name = "example", 
+    enableChangeLog = false,
+    partitions =
+    { 
+        @CreatePartition(
+            name = "example", 
+            suffix = "dc=example,dc=com",
+            cacheSize = 11000,
+            indexes = 
+            {
+                @CreateIndex(attribute="ou", cacheSize=100),
+                @CreateIndex(attribute="uid", cacheSize=11000),
+                @CreateIndex(attribute="objectClass", cacheSize=11000)
+            }
+        )
+    })
+@CreateLdapServer(transports =
+    { @CreateTransport(protocol = "LDAP", port = 10389) })
+@ApplyLdifFiles("jdbm.ldif")
+@Ignore
+public class JdbmRunner extends AbstractLdapTestUnit
+{
+
+    @Test
+    public void runServer() throws Exception
+    {
+        System.out.println( "ApacheDS started on port " + ldapServer.getPort() + ", press any key to shutdown..." );
+        System.in.read();
+    }
+    
+    
+    
+    //@Test
+    public void testIndexPerformance() throws Exception
+    {
+        LdapContext ctx = getWiredContext( ldapServer );
+
+        for(int run=0; run<5; run++)
+        {
+            long t0 = System.currentTimeMillis();
+            for ( int i = 1; i < 10000; i++ )
+            {
+                SearchControls searchControls = new SearchControls();
+                NamingEnumeration<SearchResult> results = ctx.search( "ou=test10000,dc=example,dc=com", "(uid=user." + i + ")",
+                    searchControls );
+                assertTrue( results.hasMore() );
+                SearchResult next = results.next();
+                assertNotNull( next );
+                assertFalse( results.hasMore() );
+                results.close();
+            }
+            long t1 = System.currentTimeMillis();
+            long t = t1 - t0;
+            System.out.println("indexed: " + t);
+            
+            
+            t0 = System.currentTimeMillis();
+            for ( int i = 1; i < 10000; i++ )
+            {
+                SearchControls searchControls = new SearchControls();
+                NamingEnumeration<SearchResult> results = ctx.search( "ou=test10000,dc=example,dc=com", "(employeeNumber=" + i + ")",
+                    searchControls );
+                assertTrue( results.hasMore() );
+                SearchResult next = results.next();
+                assertNotNull( next );
+                assertFalse( results.hasMore() );
+                results.close();
+            }
+            t1 = System.currentTimeMillis();
+            t = t1 - t0;
+            System.out.println("unindexed: " + t);
+        
+        }
+    }
+
+
+}
\ No newline at end of file

Added: directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/eval/ScanFilterTest.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/eval/ScanFilterTest.java?rev=925326&view=auto
==============================================================================
--- directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/eval/ScanFilterTest.java (added)
+++ directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/eval/ScanFilterTest.java Fri Mar 19 16:59:00 2010
@@ -0,0 +1,169 @@
+/*
+ *   Licensed to the Apache Software Foundation (ASF) under one
+ *   or more contributor license agreements.  See the NOTICE file
+ *   distributed with this work for additional information
+ *   regarding copyright ownership.  The ASF licenses this file
+ *   to you under the Apache License, Version 2.0 (the
+ *   "License"); you may not use this file except in compliance
+ *   with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   Unless required by applicable law or agreed to in writing,
+ *   software distributed under the License is distributed on an
+ *   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *   KIND, either express or implied.  See the License for the
+ *   specific language governing permissions and limitations
+ *   under the License.
+ *
+ */
+package org.apache.directory.server.core.partition.hbase.eval;
+
+
+import java.text.DecimalFormat;
+
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.FilterList;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
+import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
+import org.apache.hadoop.hbase.filter.FilterList.Operator;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+
+/**
+ * Evaluates scan performance with different search filters.
+ * 
+ * Requires table 'TestTable' created with PE:
+ * $ bin/hbase org.apache.hadoop.hbase.PerformanceEvaluation --rows=10 sequentialWrite 1
+ * 
+ * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
+ * @version $Rev$, $Date$
+ */
+public class ScanFilterTest
+{
+
+    private static final int LOOPS = 100;
+    private static final int RANGE = 10;
+
+    private static final DecimalFormat format = new DecimalFormat( "0000000000" );
+
+    private static final byte[] START = Bytes.toBytes( "0000000000" );
+    private static final byte[] STOP = Bytes.toBytes( format.format( RANGE ) );
+
+    private static final byte[] FAMILY = Bytes.toBytes( "info" );
+    private static final byte[] QUALIFIER = Bytes.toBytes( "data" );
+    private static final byte[] DUMMY = Bytes.toBytes( "dummy" );
+
+    private static HTable table;
+
+
+    @BeforeClass
+    public static void init() throws Exception
+    {
+        table = new HTable( "TestTable" );
+    }
+
+
+    @Test
+    public void testNoFilter() throws Exception
+    {
+        Filter filter = null;
+        scan( filter, "Null Filter" );
+    }
+
+
+    @Test
+    public void testSingleColumValueFilterEqual() throws Exception
+    {
+        Filter filter = new SingleColumnValueFilter( FAMILY, QUALIFIER, CompareOp.EQUAL, DUMMY );
+        scan( filter, "SingleColumValueFilter Equal" );
+    }
+
+
+    @Test
+    public void testSingleColumValueFilterNotEqual() throws Exception
+    {
+        Filter filter = new SingleColumnValueFilter( FAMILY, QUALIFIER, CompareOp.NOT_EQUAL, DUMMY );
+        scan( filter, "SingleColumValueFilter Not Equal" );
+    }
+
+
+    @Test
+    public void testPrefixFilter() throws Exception
+    {
+        Filter filter = new PrefixFilter( Bytes.toBytes( "0" ) );
+        scan( filter, "PrefixFilter" );
+    }
+
+
+    @Test
+    public void testGet() throws Exception
+    {
+        long t0 = System.currentTimeMillis();
+        for ( int i = 0; i < LOOPS; i++ )
+        {
+            for ( int n = 0; n < RANGE; n++ )
+            {
+                Get get = new Get( Bytes.toBytes( format.format( n ) ) );
+                get.addFamily( FAMILY );
+                table.get( get );
+            }
+        }
+        long t1 = System.currentTimeMillis();
+        System.out.println( "Get: " + ( t1 - t0 ) + "ms" );
+    }
+
+
+    private void scan( Filter filter, String label ) throws Exception
+    {
+        FilterList filterList = new FilterList( Operator.MUST_PASS_ALL );
+        filterList.addFilter( filter );
+
+        FilterList nestedFilterList = new FilterList( Operator.MUST_PASS_ALL );
+        nestedFilterList.addFilter( filterList );
+
+        FilterList nestedNestedFilterList = new FilterList( Operator.MUST_PASS_ALL );
+        nestedNestedFilterList.addFilter( nestedFilterList );
+
+        doScan( filter, label );
+        doScan( filterList, "FilterList with " + label );
+        doScan( nestedFilterList, "Nested FilterList with " + label );
+        doScan( nestedNestedFilterList, "Nested Nested FilterList with " + label );
+    }
+
+
+    private void doScan( Filter filter, String label ) throws Exception
+    {
+        Scan scan = new Scan();
+        scan.setStartRow( START );
+        scan.setStopRow( STOP );
+        //scan.setCaching( 1000 );
+        //scan.setCacheBlocks( true );
+        scan.addFamily( FAMILY );
+        scan.setFilter( filter );
+
+        long t0 = System.currentTimeMillis();
+        int count = 0;
+        for ( int i = 0; i < LOOPS; i++ )
+        {
+            count = 0;
+            ResultScanner scanner = table.getScanner( scan );
+            for ( Result result : scanner )
+            {
+                count++;
+            }
+            scanner.close();
+        }
+        long t1 = System.currentTimeMillis();
+        System.out.println( "Scan " + label + " (" + count + "): " + ( t1 - t0 ) + "ms" );
+    }
+
+}
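
If running the PerformanceEvaluation tool is inconvenient, a table with an equivalent shape can be prepared by hand. This sketch is not part of this commit; the class name TestTableSetup and the cell values are assumptions, and it only assumes that the row keys 0000000000..0000000009 and the info:data column matter to ScanFilterTest above.

    package org.apache.directory.server.core.partition.hbase.eval;

    import java.text.DecimalFormat;

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.client.HBaseAdmin;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.util.Bytes;

    public class TestTableSetup
    {
        public static void main( String[] args ) throws Exception
        {
            HBaseConfiguration conf = new HBaseConfiguration();
            HBaseAdmin admin = new HBaseAdmin( conf );

            // create TestTable with the 'info' family used by ScanFilterTest
            if ( !admin.tableExists( "TestTable" ) )
            {
                HTableDescriptor descriptor = new HTableDescriptor( "TestTable" );
                descriptor.addFamily( new HColumnDescriptor( "info" ) );
                admin.createTable( descriptor );
            }

            // write ten rows with keys 0000000000..0000000009 and an info:data cell each
            HTable table = new HTable( conf, "TestTable" );
            DecimalFormat format = new DecimalFormat( "0000000000" );
            for ( int i = 0; i < 10; i++ )
            {
                Put put = new Put( Bytes.toBytes( format.format( i ) ) );
                put.add( Bytes.toBytes( "info" ), Bytes.toBytes( "data" ), Bytes.toBytes( "value" + i ) );
                table.put( put );
            }
            table.flushCommits();
        }
    }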

Added: directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/it/AbstractHBasePartitionIT.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/it/AbstractHBasePartitionIT.java?rev=925326&view=auto
==============================================================================
--- directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/it/AbstractHBasePartitionIT.java (added)
+++ directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/it/AbstractHBasePartitionIT.java Fri Mar 19 16:59:00 2010
@@ -0,0 +1,189 @@
+/*
+ *   Licensed to the Apache Software Foundation (ASF) under one
+ *   or more contributor license agreements.  See the NOTICE file
+ *   distributed with this work for additional information
+ *   regarding copyright ownership.  The ASF licenses this file
+ *   to you under the Apache License, Version 2.0 (the
+ *   "License"); you may not use this file except in compliance
+ *   with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   Unless required by applicable law or agreed to in writing,
+ *   software distributed under the License is distributed on an
+ *   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *   KIND, either express or implied.  See the License for the
+ *   specific language governing permissions and limitations
+ *   under the License.
+ *
+ */
+package org.apache.directory.server.core.partition.hbase.it;
+
+
+import java.text.DecimalFormat;
+
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.directory.server.core.CoreSession;
+import org.apache.directory.server.core.entry.DefaultServerEntry;
+import org.apache.directory.server.core.entry.ServerEntry;
+import org.apache.directory.server.core.integ.AbstractLdapTestUnit;
+import org.apache.directory.server.core.partition.hbase.HBaseClusterTestCaseAdapter;
+import org.apache.directory.server.ldap.LdapServer;
+import org.apache.directory.shared.ldap.name.DN;
+import org.apache.directory.shared.ldap.schema.SchemaManager;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+public abstract class AbstractHBasePartitionIT extends AbstractLdapTestUnit
+{
+    private static final Logger LOG = LoggerFactory.getLogger( AbstractHBasePartitionIT.class );
+
+    protected static HBaseClusterTestCaseAdapter adapter;
+    protected CoreSession session;
+
+
+    @BeforeClass
+    public static void before() throws Exception
+    {
+        adapter = new HBaseClusterTestCaseAdapter( AbstractHBasePartitionIT.class, true );
+        adapter.setUp();
+    }
+
+
+    @AfterClass
+    public static void after() throws Exception
+    {
+        adapter.tearDown();
+    }
+
+
+    @Before
+    public void setUp() throws Exception
+    {
+        session = ldapServer.getDirectoryService().getAdminSession();
+    }
+
+    public static String[] TABLES =
+        { "apacheds_hbase_master", "apacheds_hbase_tree", "apacheds_hbase_index_objectClass",
+            "apacheds_hbase_index_dc", "apacheds_hbase_index_o", "apacheds_hbase_index_ou", "apacheds_hbase_index_uid",
+            "apacheds_hbase_index_cn" };
+
+
+    public static void createBasicTestData( LdapServer ldapServer ) throws Exception
+    {
+        CoreSession session = ldapServer.getDirectoryService().getAdminSession();
+        SchemaManager schemaManager = ldapServer.getDirectoryService().getSchemaManager();
+
+        LOG.debug( "Adding basic test data..." );
+
+        if ( !session.exists( new DN( "o=hbase" ) ) )
+        {
+            ServerEntry entry = new DefaultServerEntry( schemaManager, new DN( "o=hbase" ) );
+            entry.add( "objectClass", "top", "organization" );
+            entry.add( "o", "hbase" );
+            session.add( entry );
+            LOG.debug( "  added " + entry.getDn().getName() );
+        }
+
+        if ( !session.exists( new DN( "ou=test-ou,o=hbase" ) ) )
+        {
+            ServerEntry entry = new DefaultServerEntry( schemaManager, new DN( "ou=test-ou,o=hbase" ) );
+            entry.add( "objectClass", "top", "organizationalUnit" );
+            entry.add( "ou", "test-ou" );
+            session.add( entry );
+            LOG.debug( "  added " + entry.getDn().getName() );
+        }
+
+        if ( !session.exists( new DN( "cn=test-person,ou=test-ou,o=hbase" ) ) )
+        {
+            ServerEntry entry = new DefaultServerEntry( schemaManager, new DN( "cn=test-person,ou=test-ou,o=hbase" ) );
+            entry.add( "objectClass", "top", "person" );
+            entry.add( "cn", "test-person" );
+            entry.add( "Sn", "test-person" );
+            entry.add( "userPassword", "secret" );
+            session.add( entry );
+            LOG.debug( "  added " + entry.getDn().getName() );
+        }
+
+        LOG.debug( "...done" );
+    }
+
+
+    public static void createTestData( int number, String pattern, LdapServer ldapServer ) throws Exception
+    {
+        createTestData( number, 0, number, pattern, ldapServer );
+    }
+
+
+    public static void createTestData( int number, int start, int stop, String pattern, LdapServer ldapServer )
+        throws Exception
+    {
+        CoreSession session = ldapServer.getDirectoryService().getAdminSession();
+        SchemaManager schemaManager = ldapServer.getDirectoryService().getSchemaManager();
+
+        LOG.debug( "Adding test data " + number + "..." );
+
+        DN dn = new DN( "ou=test" + number + ",o=hbase" );
+        if ( !session.exists( dn ) )
+        {
+            ServerEntry entry = new DefaultServerEntry( schemaManager, dn );
+            entry.add( "objectClass", "top", "organizationalUnit" );
+            entry.add( "ou", "test" + number );
+            session.add( entry );
+        }
+
+        DecimalFormat df = new DecimalFormat( pattern );
+        for ( int i = start; i < stop; i++ )
+        {
+            String s = df.format( i );
+
+            dn = new DN( "cn=test" + s + ",ou=test" + number + ",o=hbase" );
+            if ( session.exists( dn ) )
+            {
+                continue;
+            }
+
+            ServerEntry entry = new DefaultServerEntry( schemaManager, dn );
+            entry.add( "objectClass", "top", "person", "organizationalPerson", "inetOrgPerson" );
+            entry.add( "cn", "test" + s );
+            entry.add( "Sn", "test" + s );
+            entry.add( "telephoneNumber", RandomStringUtils.randomNumeric( 13 ) );
+            entry.add( "mail", "test" + s + "@example.com" );
+            entry.add( "userPassword", "secret" );
+
+            session.add( entry );
+
+            if ( i > start && i % 1000 == 0 )
+            {
+                LOG.debug( "  " + System.currentTimeMillis() + " -> created " + s + " entries." );
+            }
+        }
+
+        LOG.debug( "...done" );
+    }
+
+
+    public static void compactDatabase( HBaseConfiguration conf ) throws Exception
+    {
+        HBaseAdmin admin = new HBaseAdmin( conf );
+        for ( String table : TABLES )
+        {
+            if ( admin.tableExists( table ) )
+            {
+                admin.flush( table );
+                Thread.sleep( 10000 );
+                // admin.compact( table );
+                // Thread.sleep( 10000 );
+                // admin.majorCompact( table );
+                // Thread.sleep( 10000 );
+            }
+        }
+    }
+
+}