You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@directory.apache.org by se...@apache.org on 2010/03/19 17:59:01 UTC
svn commit: r925326 [3/4] - in
/directory/sandbox/seelmann/hbase-partition-test: ./ src/ src/main/
src/test/ src/test/java/ src/test/java/org/ src/test/java/org/apache/
src/test/java/org/apache/directory/
src/test/java/org/apache/directory/server/ src/...
Added: directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/RemoteLdifImport.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/RemoteLdifImport.java?rev=925326&view=auto
==============================================================================
--- directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/RemoteLdifImport.java (added)
+++ directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/RemoteLdifImport.java Fri Mar 19 16:59:00 2010
@@ -0,0 +1,207 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+package org.apache.directory.server.core.partition.hbase.it.mapreduce;
+
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.directory.server.core.partition.hbase.mapreduce.IndexBuilder;
+import org.apache.directory.server.core.partition.hbase.mapreduce.LdifImporter;
+import org.apache.directory.server.core.partition.hbase.mapreduce.LdifInputFormat;
+import org.apache.directory.server.core.partition.hbase.mapreduce.TreeTableBuilder;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.MasterNotRunningException;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
+
+
+/**
+ *
+ * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
+ * @version $Rev$, $Date$
+ */
+public class RemoteLdifImport
+{
+
+ public static void main( String[] args ) throws Exception
+ {
+ cleanMasterTable();
+ cleanTreeTable();
+ cleanIndexTables();
+
+ importLdif( "/user/stefan/input/ldif/com-example-10000.ldif", 2 );
+ importLdif( "/user/stefan/input/ldif/com-example-10000.ldif", 3 );
+ importLdif( "/user/stefan/input/ldif/com-example-10000.ldif", 4 );
+ importLdif( "/user/stefan/input/ldif/com-example-100000.ldif", 3 );
+ importLdif( "/user/stefan/input/ldif/com-example-100000.ldif", 4 );
+ importLdif( "/user/stefan/input/ldif/com-example-1000000.ldif", 3 );
+ importLdif( "/user/stefan/input/ldif/com-example-1000000.ldif", 4 );
+
+ cleanTreeTable();
+ buildTreeTable();
+
+ cleanIndexTables();
+ buildIndices();
+ }
+
+
+ private static void importLdif( String file, int nameComponentCount ) throws IOException, InterruptedException,
+ ClassNotFoundException
+ {
+ long t0 = System.currentTimeMillis();
+ Configuration conf = new Configuration();
+ conf.set( LdifImporter.NAME_COMPONENT_COUNT, "" + nameComponentCount );
+ conf.set( LdifImporter.SUFFIX, "dc=example,dc=com" );
+ conf.set( LdifImporter.TABLE_PREFIX, "apacheds_example_" );
+
+ conf.set( "mapred.map.max.attempts", "1" );
+ conf.set( "mapred.map.tasks.speculative.execution", "false" );
+
+ Job job = new Job( conf, "Import LDIF" );
+ job.setJarByClass( LdifImporter.class );
+
+ FileInputFormat.addInputPath( job, new Path( file ) );
+ job.setInputFormatClass( LdifInputFormat.class );
+ job.setMapperClass( LdifImporter.class );
+
+ TableMapReduceUtil.initTableReducerJob( ".META.", null, job );
+ job.setNumReduceTasks( 0 );
+
+ boolean status = job.waitForCompletion( true );
+ long t1 = System.currentTimeMillis();
+ long t = t1 - t0;
+ System.out.println( "Import LDIF " + status + " in " + t + "ms." );
+ System.out.println( job.getCounters() );
+ System.out.println();
+ }
+
+
+ private static void buildTreeTable() throws IOException, InterruptedException, ClassNotFoundException
+ {
+ // build indices
+ long t0 = System.currentTimeMillis();
+ Configuration conf = new Configuration();
+ //conf.set( LdifImporter.NAME_COMPONENT_COUNT, ""+i );
+ conf.set( TreeTableBuilder.SUFFIX, "dc=example,dc=com" );
+ conf.set( TreeTableBuilder.TABLE_PREFIX, "apacheds_example_" );
+
+ conf.set( "mapred.map.max.attempts", "1" );
+ conf.set( "mapred.map.tasks.speculative.execution", "false" );
+
+ Job job = new Job( conf, "Build Tree Table" );
+ job.setJarByClass( TreeTableBuilder.class );
+
+ TableMapReduceUtil.initTableMapperJob( "apacheds_example_master", new Scan(), TreeTableBuilder.class, null,
+ null, job );
+ job.setOutputFormatClass( NullOutputFormat.class );
+ job.setNumReduceTasks( 0 );
+
+ boolean status = job.waitForCompletion( true );
+ long t1 = System.currentTimeMillis();
+ long t = t1 - t0;
+ System.out.println( "Build Tree Table " + status + " in " + t + "ms." );
+ System.out.println( job.getCounters() );
+ System.out.println();
+ }
+
+
+ private static void buildIndices() throws IOException, InterruptedException, ClassNotFoundException
+ {
+ // build indices
+ long t0 = System.currentTimeMillis();
+ Configuration conf = new Configuration();
+ //conf.set( LdifImporter.NAME_COMPONENT_COUNT, ""+i );
+ conf.set( IndexBuilder.SUFFIX, "dc=example,dc=com" );
+ conf.set( IndexBuilder.TABLE_PREFIX, "apacheds_example_" );
+ conf.set( IndexBuilder.COLUMN_INDICES, "cn,uid" );
+ conf.set( IndexBuilder.ROW_INDICES, "dc,o,ou,objectClass" );
+
+ conf.set( "mapred.map.max.attempts", "1" );
+ conf.set( "mapred.map.tasks.speculative.execution", "false" );
+
+ Job job = new Job( conf, "Build Index" );
+ job.setJarByClass( IndexBuilder.class );
+
+ TableMapReduceUtil.initTableMapperJob( "apacheds_example_master", new Scan(), IndexBuilder.class, null, null,
+ job );
+ job.setOutputFormatClass( NullOutputFormat.class );
+ job.setNumReduceTasks( 0 );
+
+ boolean status = job.waitForCompletion( true );
+ long t1 = System.currentTimeMillis();
+ long t = t1 - t0;
+ System.out.println( "Build Index " + status + " in " + t + "ms." );
+ System.out.println( job.getCounters() );
+ System.out.println();
+ }
+
+
+ private static void cleanMasterTable() throws MasterNotRunningException, IOException
+ {
+ List<String> tableNames = new ArrayList<String>();
+ tableNames.add( "apacheds_example_master" );
+ cleanTables( tableNames );
+ }
+
+
+ private static void cleanTreeTable() throws MasterNotRunningException, IOException
+ {
+ List<String> tableNames = new ArrayList<String>();
+ tableNames.add( "apacheds_example_tree" );
+ cleanTables( tableNames );
+ }
+
+
+ private static void cleanIndexTables() throws MasterNotRunningException, IOException
+ {
+ List<String> tableNames = new ArrayList<String>();
+ tableNames.add( "apacheds_example_index_cn" );
+ tableNames.add( "apacheds_example_index_uid" );
+ tableNames.add( "apacheds_example_index_dc" );
+ tableNames.add( "apacheds_example_index_o" );
+ tableNames.add( "apacheds_example_index_ou" );
+ tableNames.add( "apacheds_example_index_objectClass" );
+ cleanTables( tableNames );
+ }
+
+
+ private static void cleanTables( List<String> tableNames ) throws MasterNotRunningException, IOException
+ {
+ HBaseAdmin admin = new HBaseAdmin( new HBaseConfiguration() );
+ for ( String tableName : tableNames )
+ {
+ System.out.println( "Deleting table " + tableName );
+ if ( admin.tableExists( tableName ) )
+ {
+ admin.disableTable( tableName );
+ admin.deleteTable( tableName );
+ }
+ }
+ }
+
+}
\ No newline at end of file
Added: directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/RemoteRunner.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/RemoteRunner.java?rev=925326&view=auto
==============================================================================
--- directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/RemoteRunner.java (added)
+++ directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/RemoteRunner.java Fri Mar 19 16:59:00 2010
@@ -0,0 +1,102 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+package org.apache.directory.server.core.partition.hbase.it.mapreduce;
+
+import org.apache.directory.server.annotations.CreateLdapServer;
+import org.apache.directory.server.annotations.CreateTransport;
+import org.apache.directory.server.core.DirectoryService;
+import org.apache.directory.server.core.annotations.CreateDS;
+import org.apache.directory.server.core.annotations.CreateIndex;
+import org.apache.directory.server.core.annotations.CreatePartition;
+import org.apache.directory.server.core.factory.DSAnnotationProcessor;
+import org.apache.directory.server.core.partition.hbase.HBaseDistributedPartition;
+import org.apache.directory.server.core.partition.hbase.index.HBaseUserColumnIndex;
+import org.apache.directory.server.core.partition.hbase.index.HBaseUserRowIndex;
+import org.apache.directory.server.factory.ServerAnnotationProcessor;
+import org.apache.directory.server.ldap.LdapServer;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+
+
+/**
+ * Starts up an ApacheDS LDAP server.
+ *
+ * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
+ * @version $Rev$, $Date$
+ */
+@CreateDS(
+ name = "example",
+ enableChangeLog = false,
+ partitions =
+ {
+ @CreatePartition(
+ name = "example",
+ suffix = "dc=example,dc=com",
+ type = HBaseDistributedPartition.class,
+ cacheSize = 10,
+ indexes = {
+ @CreateIndex( attribute="cn", type = HBaseUserColumnIndex.class ),
+ @CreateIndex( attribute="uid", type = HBaseUserColumnIndex.class ),
+
+ @CreateIndex( attribute="dc", type = HBaseUserRowIndex.class ),
+ @CreateIndex( attribute="o", type = HBaseUserRowIndex.class ),
+ @CreateIndex( attribute="ou", type = HBaseUserRowIndex.class ),
+
+ @CreateIndex( attribute="objectClass", type = HBaseUserRowIndex.class )
+ }
+ )
+ })
+@CreateLdapServer(transports =
+ { @CreateTransport(protocol = "LDAP", port = 10389, nbThreads=48) })
+public class RemoteRunner
+{
+
+ public static void main( String[] args ) throws Exception
+ {
+ DirectoryService service = DSAnnotationProcessor.getDirectoryService();
+ LdapServer ldapServer = ServerAnnotationProcessor.getLdapServer( service, 10389 );
+ System.out.println( "ApacheDS started on port " + ldapServer.getPort() + ", press any key to shutdown..." );
+ System.in.read();
+ ldapServer.stop();
+ service.shutdown();
+ }
+
+
+ private static void compact() throws Exception
+ {
+ HBaseAdmin admin = new HBaseAdmin( new HBaseConfiguration() );
+ String[] TABLES =
+ { "apacheds_example_master", "apacheds_example_tree", "apacheds_example_index_objectClass",
+ "apacheds_example_index_dc", "apacheds_example_index_o", "apacheds_example_index_ou", "apacheds_example_index_uid",
+ "apacheds_example_index_cn" };
+ for ( String table : TABLES )
+ {
+ if ( admin.tableExists( table ) )
+ {
+ // admin.flush( table );
+ // Thread.sleep( 10000 );
+ // admin.compact( table );
+ // Thread.sleep( 10000 );
+ admin.majorCompact( table );
+ Thread.sleep( 10000 );
+ }
+ }
+ }
+}
\ No newline at end of file
Added: directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/table/AbstractHBaseTableTest.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/table/AbstractHBaseTableTest.java?rev=925326&view=auto
==============================================================================
--- directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/table/AbstractHBaseTableTest.java (added)
+++ directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/table/AbstractHBaseTableTest.java Fri Mar 19 16:59:00 2010
@@ -0,0 +1,172 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.directory.server.core.partition.hbase.table;
+
+
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.util.UUID;
+
+import javax.naming.InvalidNameException;
+import javax.naming.NamingException;
+
+import org.apache.directory.server.core.entry.DefaultServerEntry;
+import org.apache.directory.server.core.partition.hbase.HBaseClusterTestCaseAdapter;
+import org.apache.directory.server.core.partition.hbase.HBaseStore;
+import org.apache.directory.server.core.partition.hbase.it.AbstractHBasePartitionIT;
+import org.apache.directory.shared.ldap.csn.CsnFactory;
+import org.apache.directory.shared.ldap.name.DN;
+import org.apache.directory.shared.ldap.schema.SchemaManager;
+import org.apache.directory.shared.ldap.schema.ldif.extractor.SchemaLdifExtractor;
+import org.apache.directory.shared.ldap.schema.ldif.extractor.impl.DefaultSchemaLdifExtractor;
+import org.apache.directory.shared.ldap.schema.loader.ldif.LdifSchemaLoader;
+import org.apache.directory.shared.ldap.schema.manager.impl.DefaultSchemaManager;
+import org.apache.directory.shared.ldap.util.ExceptionUtils;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+
+/**
+ * Basic class for table tests.
+ *
+ * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
+ * @version $Rev$, $Date$
+ */
/**
 * Base class for HBase table tests. Starts a mini HBase cluster once per
 * test class, bootstraps the schema manager and the HBase store, and
 * provides factory methods for the test entries used by the table tests.
 *
 * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
 * @version $Rev$, $Date$
 */
public abstract class AbstractHBaseTableTest
{
    /** Prefix prepended to the names of all HBase tables used by the tests. */
    protected static final String TABLE_PREFIX = "apacheds_test_";

    /** Factory for entryCsn attribute values, replica id 0. */
    protected static final CsnFactory CSN_FACTORY = new CsnFactory( 0 );

    /** Adapter around the mini HBase cluster, managed by the class fixtures below. */
    protected static HBaseClusterTestCaseAdapter adapter;
    protected static SchemaManager schemaManager;
    protected static HBaseStore store;
    protected static DN suffixDn;

    // well-known entryUUIDs of the three test entries created by the
    // build*Entry() factory methods below
    protected static final UUID CONTEXT_ENTRY_ID = UUID.fromString( "00000000-0000-0000-0000-000000000001" );
    protected static final UUID OU_SALES_ID = UUID.fromString( "00000000-0000-0000-0000-000000000002" );
    protected static final UUID CN_JOHNNY_WALKER_UUID = UUID.fromString( "00000000-0000-0000-0000-000000000003" );

    /**
     * Startup mini HBase cluster.
     * NOTE(review): method name has a typo ("statup"); left unchanged because
     * JUnit invokes it through the @BeforeClass annotation, not by name.
     */
    @BeforeClass
    public static void statupMiniCluster() throws Exception
    {
        adapter = new HBaseClusterTestCaseAdapter( AbstractHBasePartitionIT.class, false );
        adapter.setUp();
    }


    /**
     * Shutdown mini HBase cluster.
     */
    @AfterClass
    public static void shutdownMiniCluster() throws Exception
    {
        adapter.tearDown();
    }


    /**
     * Initializes the schema manager, the normalized suffix DN and the HBase
     * store used by the tests.
     */
    @BeforeClass
    public static void initSchemaManager() throws Exception
    {
        String workingDirectory = System.getProperty( "workingDirectory" );

        if ( workingDirectory == null )
        {
            // fall back to the maven "target" directory this test class was compiled to
            String path = AbstractHBaseTableTest.class.getResource( "" ).getPath();
            int targetPos = path.indexOf( "target" );
            workingDirectory = path.substring( 0, targetPos + 6 );
        }

        // extract the schema LDIF files into <workingDirectory>/schema and load them
        File schemaRepository = new File( workingDirectory, "schema" );
        SchemaLdifExtractor extractor = new DefaultSchemaLdifExtractor( new File( workingDirectory ) );
        extractor.extractOrCopy( true );
        LdifSchemaLoader loader = new LdifSchemaLoader( schemaRepository );
        schemaManager = new DefaultSchemaManager( loader );

        boolean loaded = schemaManager.loadAllEnabled();

        if ( !loaded )
        {
            fail( "Schema load failed : " + ExceptionUtils.printErrors( schemaManager.getErrors() ) );
        }

        suffixDn = new DN( "o=Good Times Co." );
        suffixDn.normalize( schemaManager.getNormalizerMapping() );

        store = new HBaseStore();
        store.setSuffixDn( suffixDn.getName() );
        store.setCacheSize( 100 );
        store.setTablePrefix( TABLE_PREFIX );
        store.init( schemaManager );
    }


    /**
     * Creates the suffix entry "o=Good Times Co." with the well-known
     * entryUUID CONTEXT_ENTRY_ID.
     */
    protected DefaultServerEntry buildContextEntry() throws NamingException
    {
        DefaultServerEntry entry = new DefaultServerEntry( schemaManager, suffixDn );
        entry.add( "objectClass", "organization" );
        entry.add( "o", "Good Times Co." );
        entry.add( "postalCode", "1" );
        entry.add( "postOfficeBox", "1" );
        entry.add( "entryCsn", CSN_FACTORY.newInstance().toString() );
        entry.add( "entryUUID", CONTEXT_ENTRY_ID.toString() );
        return entry;
    }


    /**
     * Creates the organizational unit entry below the suffix with the
     * well-known entryUUID OU_SALES_ID. The escaped \E6\97\A5\E6\9C\AC in the
     * DN is the DN-escaped UTF-8 encoding of the characters \u65E5\u672C used
     * in the ou value.
     */
    protected DefaultServerEntry buildOuSalesEntry() throws InvalidNameException, NamingException
    {
        DN dn = new DN( "ou=Sales \\E6\\97\\A5\\E6\\9C\\AC,o=Good Times Co." );
        dn.normalize( schemaManager.getNormalizerMapping() );
        DefaultServerEntry entry = new DefaultServerEntry( schemaManager, dn );
        entry.add( "objectClass", "top", "organizationalUnit" );
        entry.add( "ou", "Sales \u65E5\u672C" );
        entry.add( "postalCode", "1" );
        entry.add( "postOfficeBox", "1" );
        entry.add( "entryCsn", CSN_FACTORY.newInstance().toString() );
        entry.add( "entryUUID", OU_SALES_ID.toString() );
        return entry;
    }


    /**
     * Creates the person entry below the ou=Sales entry with the well-known
     * entryUUID CN_JOHNNY_WALKER_UUID. Uses mixed-case values and a binary
     * jpegPhoto with boundary byte values (0x00, 0x7F, 0x80, 0xFF) to
     * exercise normalization and binary attribute handling.
     */
    protected DefaultServerEntry buildCnJohnnyWalkerEntry() throws InvalidNameException, NamingException
    {
        DN dn = new DN(
            "cn=JOhnny \\E6\\97\\A5\\E6\\9C\\AC WAlkeR,ou=Sales \\E6\\97\\A5\\E6\\9C\\AC,o=Good Times Co." );
        dn.normalize( schemaManager.getNormalizerMapping() );
        DefaultServerEntry entry = new DefaultServerEntry( schemaManager, dn );
        entry.add( "objectClass", "top", "person", "organizationalPerson" );
        entry.add( "ou", "Sales" );
        entry.add( "cn", "JOhnny \u65E5\u672C WAlkeR" );
        entry.add( "sn", "WAlkeR" );
        entry.add( "postalCode", "3" );
        entry.add( "postOfficeBox", "3" );
        entry.add( "jpegPhoto", new byte[]
            { 0x00, 0x01, 0x7F, ( byte ) 0x80, ( byte ) 0x81, ( byte ) 0xFF, } );
        entry.add( "entryCsn", CSN_FACTORY.newInstance().toString() );
        entry.add( "entryUUID", CN_JOHNNY_WALKER_UUID.toString() );
        return entry;
    }

}
Added: directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/table/HBaseIndexTableTest.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/table/HBaseIndexTableTest.java?rev=925326&view=auto
==============================================================================
--- directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/table/HBaseIndexTableTest.java (added)
+++ directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/table/HBaseIndexTableTest.java Fri Mar 19 16:59:00 2010
@@ -0,0 +1,547 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.directory.server.core.partition.hbase.table;
+
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.util.List;
+import java.util.NavigableMap;
+import java.util.UUID;
+
+import org.apache.directory.server.core.entry.DefaultServerEntry;
+import org.apache.directory.server.core.partition.hbase.Utils;
+import org.apache.directory.server.core.partition.hbase.index.HBaseUserColumnIndex;
+import org.apache.directory.server.core.partition.hbase.index.HBaseUserRowIndex;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Before;
+import org.junit.Test;
+
+
+/**
+ * Tests for {@link HBaseIndexTableBase}.
+ *
+ * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
+ * @version $Rev$, $Date$
+ */
+public class HBaseIndexTableTest extends AbstractHBaseTableTest
+{
    /** HBase table backing the objectClass row index. */
    private static final String OBJECTCLASS_INDEX_TABLE_NAME = TABLE_PREFIX + "index_objectClass";
    /** HBase table backing the jpegPhoto row index. */
    private static final String JPEGPHOTO_INDEX_TABLE_NAME = TABLE_PREFIX + "index_jpegPhoto";
    /** HBase table backing the cn column index. */
    private static final String CN_INDEX_TABLE_NAME = TABLE_PREFIX + "index_cn";
    /** HBase table backing the userCertificate column index. */
    private static final String USERCERTIFICATE_INDEX_TABLE_NAME = TABLE_PREFIX + "index_userCertificate";
    // index table facades under test, re-created for every test in setup()
    private HBaseRowIndexTable objectClassIndexTable;
    private HBaseRowIndexTable jpegPhotoIndexTable;
    private HBaseColumnIndexTable cnIndexTable;
    private HBaseColumnIndexTable userCertificateIndexTable;
+
+
+ /**
+ * Cleanup HBase tables before each test.
+ *
+ */
+ @Before
+ public void setup() throws Exception
+ {
+ HBaseAdmin admin = new HBaseAdmin( adapter.getHBaseConfigurtion() );
+ if ( admin.tableExists( CN_INDEX_TABLE_NAME ) )
+ {
+ HTable cnIndexHTable = new HTable( CN_INDEX_TABLE_NAME );
+ ResultScanner masterScanner = cnIndexHTable.getScanner( new Scan() );
+ Result masterResult;
+ while ( ( masterResult = masterScanner.next() ) != null )
+ {
+ cnIndexHTable.delete( new Delete( masterResult.getRow() ) );
+ }
+ }
+ if ( admin.tableExists( OBJECTCLASS_INDEX_TABLE_NAME ) )
+ {
+ HTable objectClassIndexHTable = new HTable( OBJECTCLASS_INDEX_TABLE_NAME );
+ ResultScanner treeScanner = objectClassIndexHTable.getScanner( new Scan() );
+ Result treeResult;
+ while ( ( treeResult = treeScanner.next() ) != null )
+ {
+ objectClassIndexHTable.delete( new Delete( treeResult.getRow() ) );
+ }
+ }
+
+ // objectClassIndexTable = new HBaseRowIndexTable( "2.5.4.0", store, 100 );
+ // jpegPhotoIndexTable = new HBaseRowIndexTable( "0.9.2342.19200300.100.1.60", store, 100 );
+ // cnIndexTable = new HBaseColumnIndexTable( "2.5.4.3", schemaManager, TABLE_PREFIX, adapter.conf, 100 );
+ // userCertificateIndexTable = new HBaseColumnIndexTable( "2.5.4.36", schemaManager, TABLE_PREFIX, adapter.conf,
+ // 100 );
+
+ HBaseUserRowIndex index = new HBaseUserRowIndex();
+ index.setAttributeId( "2.5.4.0" );
+ index.setCacheSize( 100 );
+ index.setStore( store );
+ objectClassIndexTable = index.getIndexTable();
+ store.addIndex( index );
+
+ index = new HBaseUserRowIndex();
+ index.setAttributeId( "0.9.2342.19200300.100.1.60" );
+ index.setStore( store );
+ jpegPhotoIndexTable = index.getIndexTable();
+ store.addIndex( index );
+
+ HBaseUserColumnIndex index2 = new HBaseUserColumnIndex();
+ index2.setAttributeId( "2.5.4.3" );
+ index2.setStore( store );
+ cnIndexTable = index2.getIndexTable();
+ store.addIndex( index );
+
+ index2 = new HBaseUserColumnIndex();
+ index2.setAttributeId( "2.5.4.36" );
+ index2.setStore( store );
+ userCertificateIndexTable = index2.getIndexTable();
+ store.addIndex( index );
+
+ }
+
+
+ @Test
+ public void first() throws Exception
+ {
+ DefaultServerEntry entry = buildContextEntry();
+ objectClassIndexTable.add( entry.get( "objectClass" ).get( 0 ).getBytes(), CONTEXT_ENTRY_ID );
+ }
+
+
    /**
     * Verifies that added index entries show up in the underlying HBase index
     * tables with the expected row key layout.
     */
    @Test
    public void testAdd() throws Exception
    {
        // 1st entry: context entry, single objectClass value
        DefaultServerEntry entry = buildContextEntry();
        objectClassIndexTable.add( entry.get( "objectClass" ).get( 0 ).getBytes(), CONTEXT_ENTRY_ID );

        HTable objectClassIndexHTable = new HTable( OBJECTCLASS_INDEX_TABLE_NAME );

        // row index key layout: '=' + normalized value + '\u0000' + entry UUID
        Get equalGet = new Get( Bytes.toBytes( "=organization\u000000000000-0000-0000-0000-000000000001" ) );
        assertTrue( objectClassIndexHTable.exists( equalGet ) );
        Result equalResult = objectClassIndexHTable.get( equalGet );

        // the row carries exactly one column in the "info" family; the status
        // value "e" presumably marks the index entry as existing -- TODO
        // confirm against the index table implementation
        NavigableMap<byte[], byte[]> equalInfoMap = equalResult.getFamilyMap( Bytes.toBytes( "info" ) );
        assertNotNull( equalInfoMap );
        assertEquals( 1, equalInfoMap.size() );
        assertEquals( "e", Bytes.toString( equalInfoMap.get( Bytes.toBytes( "status" ) ) ) );

        // 2nd entry: ou=Sales, two objectClass values
        entry = buildOuSalesEntry();
        objectClassIndexTable.add( entry.get( "objectClass" ).get( 0 ).getBytes(), OU_SALES_ID );
        objectClassIndexTable.add( entry.get( "objectClass" ).get( 1 ).getBytes(), OU_SALES_ID );

        // 3rd entry: person entry, three objectClass values plus one cn value
        entry = buildCnJohnnyWalkerEntry();
        objectClassIndexTable.add( entry.get( "objectClass" ).get( 0 ).getBytes(), CN_JOHNNY_WALKER_UUID );
        objectClassIndexTable.add( entry.get( "objectClass" ).get( 1 ).getBytes(), CN_JOHNNY_WALKER_UUID );
        objectClassIndexTable.add( entry.get( "objectClass" ).get( 2 ).getBytes(), CN_JOHNNY_WALKER_UUID );
        cnIndexTable.add( entry.get( "cn" ).get( 0 ).getBytes(), CN_JOHNNY_WALKER_UUID );

        HTable cnIndexHTable = new HTable( CN_INDEX_TABLE_NAME );

        // one row per (value, entry UUID) pair; note the values are stored
        // normalized (lower case)
        assertTrue( objectClassIndexHTable.exists( new Get( Bytes
            .toBytes( "=organization\u000000000000-0000-0000-0000-000000000001" ) ) ) );
        assertTrue( objectClassIndexHTable.exists( new Get( Bytes
            .toBytes( "=organizationalunit\u000000000000-0000-0000-0000-000000000002" ) ) ) );
        assertTrue( objectClassIndexHTable.exists( new Get( Bytes
            .toBytes( "=top\u000000000000-0000-0000-0000-000000000002" ) ) ) );
        assertTrue( objectClassIndexHTable.exists( new Get( Bytes
            .toBytes( "=person\u000000000000-0000-0000-0000-000000000003" ) ) ) );
        assertTrue( objectClassIndexHTable.exists( new Get( Bytes
            .toBytes( "=organizationalperson\u000000000000-0000-0000-0000-000000000003" ) ) ) );
        assertTrue( objectClassIndexHTable.exists( new Get( Bytes
            .toBytes( "=top\u000000000000-0000-0000-0000-000000000003" ) ) ) );

        // column index key layout: '=' + normalized value only (no UUID suffix)
        assertTrue( cnIndexHTable.exists( new Get( Bytes.toBytes( "=johnny \u65E5\u672C walker" ) ) ) );

        Result cnCountResult = cnIndexHTable.get( new Get( Bytes.toBytes( "=johnny \u65E5\u672C walker" ) ) );
        NavigableMap<byte[], byte[]> cnCountInfoMap = cnCountResult.getFamilyMap( Bytes.toBytes( "info" ) );
        assertNotNull( cnCountInfoMap );
        assertEquals( 1, cnCountInfoMap.size() );
    }
+
+
    /**
     * Adds index entries for three entries, drops them again and checks the
     * state of the underlying HBase index tables.
     */
    @Test
    public void testDelete() throws Exception
    {
        // 1st entry: context entry, single objectClass value
        DefaultServerEntry contextEntry = buildContextEntry();
        objectClassIndexTable.add( contextEntry.get( "objectClass" ).get( 0 ).getBytes(), CONTEXT_ENTRY_ID );

        // 2nd entry: ou=Sales, two objectClass values
        DefaultServerEntry ouSalesEntry = buildOuSalesEntry();
        objectClassIndexTable.add( ouSalesEntry.get( "objectClass" ).get( 0 ).getBytes(), OU_SALES_ID );
        objectClassIndexTable.add( ouSalesEntry.get( "objectClass" ).get( 1 ).getBytes(), OU_SALES_ID );

        // 3rd entry: person entry, three objectClass values plus one cn value
        DefaultServerEntry cnEntry = buildCnJohnnyWalkerEntry();
        objectClassIndexTable.add( cnEntry.get( "objectClass" ).get( 0 ).getBytes(), CN_JOHNNY_WALKER_UUID );
        objectClassIndexTable.add( cnEntry.get( "objectClass" ).get( 1 ).getBytes(), CN_JOHNNY_WALKER_UUID );
        objectClassIndexTable.add( cnEntry.get( "objectClass" ).get( 2 ).getBytes(), CN_JOHNNY_WALKER_UUID );
        cnIndexTable.add( cnEntry.get( "cn" ).get( 0 ).getBytes(), CN_JOHNNY_WALKER_UUID );

        HTable objectClassIndexHTable = new HTable( OBJECTCLASS_INDEX_TABLE_NAME );
        HTable cnIndexHTable = new HTable( CN_INDEX_TABLE_NAME );

        // delete 3rd entry
        objectClassIndexTable.drop( cnEntry.get( "objectClass" ).get( 0 ).getBytes(), CN_JOHNNY_WALKER_UUID );
        objectClassIndexTable.drop( cnEntry.get( "objectClass" ).get( 1 ).getBytes(), CN_JOHNNY_WALKER_UUID );
        objectClassIndexTable.drop( cnEntry.get( "objectClass" ).get( 2 ).getBytes(), CN_JOHNNY_WALKER_UUID );
        cnIndexTable.drop( cnEntry.get( "cn" ).get( 0 ).getBytes(), CN_JOHNNY_WALKER_UUID );

        // NOTE(review): the rows are still expected to exist after the drop;
        // presumably drop() only updates status/count information and leaves
        // the row key in place -- confirm against the index table implementation
        assertTrue( objectClassIndexHTable.exists( new Get( Bytes
            .toBytes( "=organization\u000000000000-0000-0000-0000-000000000001" ) ) ) );
        assertTrue( objectClassIndexHTable.exists( new Get( Bytes
            .toBytes( "=organizationalunit\u000000000000-0000-0000-0000-000000000002" ) ) ) );
        assertTrue( objectClassIndexHTable.exists( new Get( Bytes
            .toBytes( "=top\u000000000000-0000-0000-0000-000000000002" ) ) ) );
        assertTrue( objectClassIndexHTable.exists( new Get( Bytes
            .toBytes( "=person\u000000000000-0000-0000-0000-000000000003" ) ) ) );
        assertTrue( objectClassIndexHTable.exists( new Get( Bytes
            .toBytes( "=organizationalperson\u000000000000-0000-0000-0000-000000000003" ) ) ) );
        assertTrue( objectClassIndexHTable.exists( new Get( Bytes
            .toBytes( "=top\u000000000000-0000-0000-0000-000000000003" ) ) ) );

        assertTrue( cnIndexHTable.exists( new Get( Bytes.toBytes( "=johnny \u65E5\u672C walker" ) ) ) );

        // delete 2nd and 1st entry
        objectClassIndexTable.drop( ouSalesEntry.get( "objectClass" ).get( 0 ).getBytes(), OU_SALES_ID );
        objectClassIndexTable.drop( ouSalesEntry.get( "objectClass" ).get( 1 ).getBytes(), OU_SALES_ID );
        objectClassIndexTable.drop( contextEntry.get( "objectClass" ).get( 0 ).getBytes(), CONTEXT_ENTRY_ID );

        // same expectation after dropping all entries: the rows remain
        assertTrue( objectClassIndexHTable.exists( new Get( Bytes
            .toBytes( "=organization\u000000000000-0000-0000-0000-000000000001" ) ) ) );
        assertTrue( objectClassIndexHTable.exists( new Get( Bytes
            .toBytes( "=organizationalunit\u000000000000-0000-0000-0000-000000000002" ) ) ) );
        assertTrue( objectClassIndexHTable.exists( new Get( Bytes
            .toBytes( "=top\u000000000000-0000-0000-0000-000000000002" ) ) ) );
        assertTrue( objectClassIndexHTable.exists( new Get( Bytes
            .toBytes( "=person\u000000000000-0000-0000-0000-000000000003" ) ) ) );
        assertTrue( objectClassIndexHTable.exists( new Get( Bytes
            .toBytes( "=organizationalperson\u000000000000-0000-0000-0000-000000000003" ) ) ) );
        assertTrue( objectClassIndexHTable.exists( new Get( Bytes
            .toBytes( "=top\u000000000000-0000-0000-0000-000000000003" ) ) ) );
    }
+
+
+ @Test
+ public void testAddDelete() throws Exception
+ {
+ for ( int i = 0; i < 500; i++ )
+ {
+ // add + verify
+ DefaultServerEntry entry = buildContextEntry();
+ objectClassIndexTable.add( entry.get( "objectClass" ).get( 0 ).getBytes(), CONTEXT_ENTRY_ID );
+ HTable objectClassIndexHTable = new HTable( OBJECTCLASS_INDEX_TABLE_NAME );
+
+ Get existsGet = new Get( Bytes.toBytes( "=organization\u000000000000-0000-0000-0000-000000000001" ) );
+ assertTrue( objectClassIndexHTable.exists( existsGet ) );
+ Result existsResult = objectClassIndexHTable.get( existsGet );
+ NavigableMap<byte[], byte[]> existsInfoMap = existsResult.getFamilyMap( Bytes.toBytes( "info" ) );
+ assertNotNull( existsInfoMap );
+ assertEquals( 1, existsInfoMap.size() );
+ assertEquals( "e", Bytes.toString( existsInfoMap.get( Bytes.toBytes( "status" ) ) ) );
+
+ // delete + verify
+ objectClassIndexTable.drop( entry.get( "objectClass" ).get( 0 ).getBytes(), CONTEXT_ENTRY_ID );
+
+ Get deletedGet = new Get( Bytes.toBytes( "=organization\u000000000000-0000-0000-0000-000000000001" ) );
+ assertTrue( objectClassIndexHTable.exists( deletedGet ) );
+ Result deletedResult = objectClassIndexHTable.get( deletedGet );
+ NavigableMap<byte[], byte[]> deletedInfoMap = deletedResult.getFamilyMap( Bytes.toBytes( "info" ) );
+ assertNotNull( deletedInfoMap );
+ assertEquals( 1, deletedInfoMap.size() );
+ assertEquals( "d", Bytes.toString( deletedInfoMap.get( Bytes.toBytes( "status" ) ) ) );
+ }
+ for ( int i = 0; i < 500; i++ )
+ {
+ // add + verify
+ DefaultServerEntry entry = buildCnJohnnyWalkerEntry();
+ cnIndexTable.add( entry.get( "cn" ).get( 0 ).getBytes(), CN_JOHNNY_WALKER_UUID );
+ HTable cnIndexHTable = new HTable( CN_INDEX_TABLE_NAME );
+
+ Get existsGet = new Get( Bytes.toBytes( "=johnny \u65E5\u672C walker" ) );
+ assertTrue( cnIndexHTable.exists( existsGet ) );
+ Result existsResult = cnIndexHTable.get( existsGet );
+ NavigableMap<byte[], byte[]> existsInfoMap = existsResult.getFamilyMap( Bytes.toBytes( "info" ) );
+ assertNotNull( existsInfoMap );
+ assertEquals( 1, existsInfoMap.size() );
+ assertEquals( "e", Bytes.toString( existsInfoMap.get( Utils.toBytes( CN_JOHNNY_WALKER_UUID ) ) ) );
+
+ // delete + verify
+ cnIndexTable.drop( entry.get( "cn" ).get( 0 ).getBytes(), CN_JOHNNY_WALKER_UUID );
+
+ Get deletedGet = new Get( Bytes.toBytes( "=johnny \u65E5\u672C walker" ) );
+ assertTrue( cnIndexHTable.exists( deletedGet ) );
+ Result deletedResult = cnIndexHTable.get( deletedGet );
+ NavigableMap<byte[], byte[]> deletedInfoMap = deletedResult.getFamilyMap( Bytes.toBytes( "info" ) );
+ assertNotNull( deletedInfoMap );
+ assertEquals( 1, deletedInfoMap.size() );
+ assertEquals( "d", Bytes.toString( deletedInfoMap.get( Utils.toBytes( CN_JOHNNY_WALKER_UUID ) ) ) );
+ }
+
+ }
+
+
+ @Test
+ public void testCounts() throws Exception
+ {
+ assertEquals( 0, objectClassIndexTable.count( "organization" ) );
+ assertEquals( 0, objectClassIndexTable.count( "organizationalunit" ) );
+ assertEquals( 0, objectClassIndexTable.count( "top" ) );
+ assertEquals( 0, objectClassIndexTable.count( "person" ) );
+ assertEquals( 0, cnIndexTable.count( "johnny \u65E5\u672C walker" ) );
+
+ // 1st entry
+ DefaultServerEntry contextEntry = buildContextEntry();
+ objectClassIndexTable.add( contextEntry.get( "objectClass" ).get( 0 ).getBytes(), CONTEXT_ENTRY_ID );
+
+ assertEquals( 1, objectClassIndexTable.count( "organization" ) );
+
+ // 2nd entry
+ DefaultServerEntry ouSalesEntry = buildOuSalesEntry();
+ objectClassIndexTable.add( ouSalesEntry.get( "objectClass" ).get( 0 ).getBytes(), OU_SALES_ID );
+ objectClassIndexTable.add( ouSalesEntry.get( "objectClass" ).get( 1 ).getBytes(), OU_SALES_ID );
+
+ assertEquals( 1, objectClassIndexTable.count( "top" ) );
+ assertEquals( 1, objectClassIndexTable.count( "organizationalunit" ) );
+
+ // 3rd entry
+ DefaultServerEntry cnEntry = buildCnJohnnyWalkerEntry();
+ objectClassIndexTable.add( cnEntry.get( "objectClass" ).get( 0 ).getBytes(), CN_JOHNNY_WALKER_UUID );
+ objectClassIndexTable.add( cnEntry.get( "objectClass" ).get( 1 ).getBytes(), CN_JOHNNY_WALKER_UUID );
+ objectClassIndexTable.add( cnEntry.get( "objectClass" ).get( 2 ).getBytes(), CN_JOHNNY_WALKER_UUID );
+ cnIndexTable.add( cnEntry.get( "cn" ).get( 0 ).getBytes(), CN_JOHNNY_WALKER_UUID );
+
+ assertEquals( 2, objectClassIndexTable.count( "top" ) );
+ assertEquals( 1, objectClassIndexTable.count( "person" ) );
+ assertEquals( 1, cnIndexTable.count( "johnny \u65E5\u672C walker" ) );
+
+ // delete 3rd and 2nd
+ objectClassIndexTable.drop( cnEntry.get( "objectClass" ).get( 0 ).getBytes(), CN_JOHNNY_WALKER_UUID );
+ objectClassIndexTable.drop( cnEntry.get( "objectClass" ).get( 1 ).getBytes(), CN_JOHNNY_WALKER_UUID );
+ objectClassIndexTable.drop( cnEntry.get( "objectClass" ).get( 2 ).getBytes(), CN_JOHNNY_WALKER_UUID );
+ cnIndexTable.drop( cnEntry.get( "cn" ).get( 0 ).getBytes(), CN_JOHNNY_WALKER_UUID );
+ objectClassIndexTable.drop( ouSalesEntry.get( "objectClass" ).get( 0 ).getBytes(), OU_SALES_ID );
+ objectClassIndexTable.drop( ouSalesEntry.get( "objectClass" ).get( 1 ).getBytes(), OU_SALES_ID );
+
+ assertEquals( 0, objectClassIndexTable.count( "organizationalunit" ) );
+ assertEquals( 0, objectClassIndexTable.count( "top" ) );
+ assertEquals( 0, objectClassIndexTable.count( "person" ) );
+ assertEquals( 0, cnIndexTable.count( "johnny \u65E5\u672C walker" ) );
+ assertEquals( 1, objectClassIndexTable.count( "organization" ) );
+
+ // delete 1st
+ objectClassIndexTable.drop( contextEntry.get( "objectClass" ).get( 0 ).getBytes(), CONTEXT_ENTRY_ID );
+
+ assertEquals( 0, objectClassIndexTable.count( "organization" ) );
+
+ // test non-existing values
+ assertEquals( 0, objectClassIndexTable.count( null ) );
+ assertEquals( 0, cnIndexTable.count( null ) );
+ assertEquals( 0, objectClassIndexTable.count( "" ) );
+ assertEquals( 0, cnIndexTable.count( "" ) );
+ assertEquals( 0, objectClassIndexTable.count( "abc" ) );
+ assertEquals( 0, cnIndexTable.count( "abc" ) );
+ assertEquals( 0, objectClassIndexTable.count( "\u65E5\u672C" ) );
+ assertEquals( 0, cnIndexTable.count( "\u65E5\u672C" ) );
+ assertEquals( 0, objectClassIndexTable.count( new byte[]
+ { 0x00, 0x01, 0x02 } ) );
+ assertEquals( 0, cnIndexTable.count( new byte[]
+ { 0x00, 0x01, 0x02 } ) );
+ }
+
+
+ @Test
+ public void testExists() throws Exception
+ {
+ assertFalse( objectClassIndexTable.exists( "organization", CONTEXT_ENTRY_ID ) );
+ assertFalse( objectClassIndexTable.exists( "organizationalunit", OU_SALES_ID ) );
+ assertFalse( objectClassIndexTable.exists( "top", OU_SALES_ID ) );
+ assertFalse( objectClassIndexTable.exists( "top", CN_JOHNNY_WALKER_UUID ) );
+ assertFalse( objectClassIndexTable.exists( "person", CN_JOHNNY_WALKER_UUID ) );
+ assertFalse( cnIndexTable.exists( "johnny \u65E5\u672C walker", CN_JOHNNY_WALKER_UUID ) );
+
+ // 1st entry
+ DefaultServerEntry contextEntry = buildContextEntry();
+ objectClassIndexTable.add( contextEntry.get( "objectClass" ).get( 0 ).getBytes(), CONTEXT_ENTRY_ID );
+
+ assertTrue( objectClassIndexTable.exists( "organization", CONTEXT_ENTRY_ID ) );
+
+ // 2nd entry
+ DefaultServerEntry ouSalesEntry = buildOuSalesEntry();
+ objectClassIndexTable.add( ouSalesEntry.get( "objectClass" ).get( 0 ).getBytes(), OU_SALES_ID );
+ objectClassIndexTable.add( ouSalesEntry.get( "objectClass" ).get( 1 ).getBytes(), OU_SALES_ID );
+
+ assertTrue( objectClassIndexTable.exists( "organizationalunit", OU_SALES_ID ) );
+ assertTrue( objectClassIndexTable.exists( "top", OU_SALES_ID ) );
+
+ // 3rd entry
+ DefaultServerEntry cnEntry = buildCnJohnnyWalkerEntry();
+ objectClassIndexTable.add( cnEntry.get( "objectClass" ).get( 0 ).getBytes(), CN_JOHNNY_WALKER_UUID );
+ objectClassIndexTable.add( cnEntry.get( "objectClass" ).get( 1 ).getBytes(), CN_JOHNNY_WALKER_UUID );
+ objectClassIndexTable.add( cnEntry.get( "objectClass" ).get( 2 ).getBytes(), CN_JOHNNY_WALKER_UUID );
+ cnIndexTable.add( cnEntry.get( "cn" ).get( 0 ).getBytes(), CN_JOHNNY_WALKER_UUID );
+
+ assertTrue( objectClassIndexTable.exists( "top", CN_JOHNNY_WALKER_UUID ) );
+ assertTrue( objectClassIndexTable.exists( "person", CN_JOHNNY_WALKER_UUID ) );
+ assertTrue( cnIndexTable.exists( "johnny \u65E5\u672C walker", CN_JOHNNY_WALKER_UUID ) );
+
+ // delete 3rd and 2nd
+ objectClassIndexTable.drop( cnEntry.get( "objectClass" ).get( 0 ).getBytes(), CN_JOHNNY_WALKER_UUID );
+ objectClassIndexTable.drop( cnEntry.get( "objectClass" ).get( 1 ).getBytes(), CN_JOHNNY_WALKER_UUID );
+ objectClassIndexTable.drop( cnEntry.get( "objectClass" ).get( 2 ).getBytes(), CN_JOHNNY_WALKER_UUID );
+ cnIndexTable.drop( cnEntry.get( "cn" ).get( 0 ).getBytes(), CN_JOHNNY_WALKER_UUID );
+ objectClassIndexTable.drop( ouSalesEntry.get( "objectClass" ).get( 0 ).getBytes(), OU_SALES_ID );
+ objectClassIndexTable.drop( ouSalesEntry.get( "objectClass" ).get( 1 ).getBytes(), OU_SALES_ID );
+
+ assertTrue( objectClassIndexTable.exists( "organization", CONTEXT_ENTRY_ID ) );
+ assertFalse( objectClassIndexTable.exists( "organizationalunit", OU_SALES_ID ) );
+ assertFalse( objectClassIndexTable.exists( "top", OU_SALES_ID ) );
+ assertFalse( objectClassIndexTable.exists( "top", CN_JOHNNY_WALKER_UUID ) );
+ assertFalse( objectClassIndexTable.exists( "person", CN_JOHNNY_WALKER_UUID ) );
+ assertFalse( cnIndexTable.exists( "johnny \u65E5\u672C walker", CN_JOHNNY_WALKER_UUID ) );
+
+ // delete 1st
+ objectClassIndexTable.drop( contextEntry.get( "objectClass" ).get( 0 ).getBytes(), CONTEXT_ENTRY_ID );
+
+ assertFalse( objectClassIndexTable.exists( "organization", CONTEXT_ENTRY_ID ) );
+
+ // test non-existing values
+ assertFalse( objectClassIndexTable.exists( null, null ) );
+ assertFalse( cnIndexTable.exists( null, null ) );
+ assertFalse( objectClassIndexTable.exists( "", UUID.fromString( "ffffffff-ffff-ffff-ffff-ffffffffffff" ) ) );
+ assertFalse( cnIndexTable.exists( "", UUID.fromString( "ffffffff-ffff-ffff-ffff-ffffffffffff" ) ) );
+ assertFalse( objectClassIndexTable.exists( "abc", UUID.fromString( "00000000-0000-0000-0000-000000000000" ) ) );
+ assertFalse( cnIndexTable.exists( "abc", UUID.fromString( "00000000-0000-0000-0000-000000000000" ) ) );
+ assertFalse( objectClassIndexTable.exists( "\u65E5\u672C", null ) );
+ assertFalse( cnIndexTable.exists( "\u65E5\u672C", null ) );
+ assertFalse( objectClassIndexTable.exists( new byte[]
+ { 0x00, 0x01, 0x02 }, null ) );
+ assertFalse( cnIndexTable.exists( new byte[]
+ { 0x00, 0x01, 0x02 }, null ) );
+ }
+
+
+ @Test
+ public void testGetScanKey() throws Exception
+ {
+ byte[] scanKey = objectClassIndexTable.getScanKey( "bar", HBaseIndexTable.DELIMITER );
+ assertEquals( "=bar\u0000", Bytes.toString( scanKey ) );
+
+ scanKey = objectClassIndexTable.getScanKey( "foo", HBaseIndexTable.DELIMITER );
+ assertEquals( "=foo\u0000", Bytes.toString( scanKey ) );
+
+ scanKey = objectClassIndexTable.getScanKey( "foobar", null );
+ assertEquals( "=foobar", Bytes.toString( scanKey ) );
+
+ scanKey = objectClassIndexTable.getScanKey( "", HBaseIndexTable.DELIMITER );
+ assertEquals( "=\u0000", Bytes.toString( scanKey ) );
+
+ scanKey = objectClassIndexTable.getScanKey( "", null );
+ assertEquals( "=", Bytes.toString( scanKey ) );
+
+ scanKey = objectClassIndexTable.getScanKey( null, null );
+ assertTrue( Bytes.equals( new byte[]
+ { '=' }, scanKey ) );
+
+ scanKey = objectClassIndexTable.getScanKey( null, null );
+ scanKey = Utils.incrementBytes( scanKey );
+ assertTrue( Bytes.equals( new byte[]
+ { '>', }, scanKey ) );
+
+ scanKey = cnIndexTable.getScanKey( "bar" );
+ assertEquals( "=bar", Bytes.toString( scanKey ) );
+
+ scanKey = cnIndexTable.getScanKey( "foo" );
+ assertEquals( "=foo", Bytes.toString( scanKey ) );
+
+ scanKey = cnIndexTable.getScanKey( "" );
+ assertEquals( "=", Bytes.toString( scanKey ) );
+
+ scanKey = cnIndexTable.getScanKey( null );
+ assertTrue( Bytes.equals( new byte[]
+ { '=' }, scanKey ) );
+
+ scanKey = cnIndexTable.getScanKey( null );
+ scanKey = Utils.incrementBytes( scanKey );
+ assertTrue( Bytes.equals( new byte[]
+ { '>' }, scanKey ) );
+ }
+
+
+ @Test
+ public void testGetValue() throws Exception
+ {
+ byte[] bytes = Bytes.toBytes( "=bar\u000012345678-1234-1234-1234-123456789abc" );
+ assertEquals( 41, bytes.length );
+ Object value = objectClassIndexTable.extractValueFromEqualsKey( bytes );
+ assertEquals( "bar", value );
+ UUID uuid = objectClassIndexTable.extractEntryIdFromEqualsKey( bytes );
+ assertEquals( "12345678-1234-1234-1234-123456789abc", uuid.toString() );
+
+ bytes = new byte[]
+ { '=', 0x00, 0x01, 0x00, '0', '0', '0', '0', '0', '0', '0', '0', '-', '0', '0', '0', '0', '-', '0', '0',
+ '0', '0', '-', '0', '0', '0', '0', '-', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0' };
+ assertEquals( 40, bytes.length );
+ value = jpegPhotoIndexTable.extractValueFromEqualsKey( bytes );
+ assertTrue( Bytes.equals( new byte[]
+ { 0x00, 0x01 }, ( byte[] ) value ) );
+ uuid = objectClassIndexTable.extractEntryIdFromEqualsKey( bytes );
+ assertEquals( "00000000-0000-0000-0000-000000000000", uuid.toString() );
+
+ value = cnIndexTable.extractValueFromEqualsKey( Bytes.toBytes( "=foobar" ) );
+ assertEquals( "foobar", value );
+
+ value = userCertificateIndexTable.extractValueFromEqualsKey( new byte[]
+ { '=', 0x00, 0x01 } );
+ assertTrue( Bytes.equals( new byte[]
+ { 0x00, 0x01 }, ( byte[] ) value ) );
+ }
+
+
+ @Test
+ public void testGetColumnCandidates() throws Exception
+ {
+ DefaultServerEntry cnEntry = buildCnJohnnyWalkerEntry();
+ cnIndexTable.add( cnEntry.get( "cn" ).get( 0 ).getBytes(), CN_JOHNNY_WALKER_UUID );
+
+ List<UUID> candidates = cnIndexTable.getColumnCandidates( "johnny \u65E5\u672C walker" );
+ assertNotNull( candidates );
+ assertEquals( 1, candidates.size() );
+ assertTrue( candidates.contains( CN_JOHNNY_WALKER_UUID ) );
+ }
+
+
+ @Test
+ public void last() throws Exception
+ {
+ }
+
+}
Added: directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/table/HBaseMasterTableTest.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/table/HBaseMasterTableTest.java?rev=925326&view=auto
==============================================================================
--- directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/table/HBaseMasterTableTest.java (added)
+++ directory/sandbox/seelmann/hbase-partition-test/src/test/java/org/apache/directory/server/core/partition/hbase/table/HBaseMasterTableTest.java Fri Mar 19 16:59:00 2010
@@ -0,0 +1,554 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.directory.server.core.partition.hbase.table;
+
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import java.util.NavigableMap;
+import java.util.UUID;
+
+import org.apache.directory.server.core.entry.DefaultServerEntry;
+import org.apache.directory.server.core.partition.hbase.HBaseStore;
+import org.apache.directory.server.core.partition.hbase.Utils;
+import org.apache.directory.shared.ldap.name.DN;
+import org.apache.directory.shared.ldap.name.RDN;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Before;
+import org.junit.Test;
+
+
+/**
+ * Tests for {@link HBaseMasterTable}.
+ *
+ * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
+ * @version $Rev$, $Date$
+ */
+public class HBaseMasterTableTest extends AbstractHBaseTableTest
+{
+
+ private static final String MASTER_TABLE_NAME = TABLE_PREFIX + "master";
+ private static final String TREE_TABLE_NAME = TABLE_PREFIX + "tree";
+ private HBaseMasterTable masterTable;
+
+
+ /**
+ * Cleanup HBase tables before each test.
+ *
+ */
+ @Before
+ public void setup() throws Exception
+ {
+ HBaseAdmin admin = new HBaseAdmin( adapter.getHBaseConfigurtion() );
+ if ( admin.tableExists( MASTER_TABLE_NAME ) )
+ {
+ HTable masterHTable = new HTable( MASTER_TABLE_NAME );
+ ResultScanner masterScanner = masterHTable.getScanner( new Scan() );
+ Result masterResult;
+ while ( ( masterResult = masterScanner.next() ) != null )
+ {
+ masterHTable.delete( new Delete( masterResult.getRow() ) );
+ }
+ }
+ if ( admin.tableExists( TREE_TABLE_NAME ) )
+ {
+ HTable treeHTable = new HTable( TREE_TABLE_NAME );
+ ResultScanner treeScanner = treeHTable.getScanner( new Scan() );
+ Result treeResult;
+ while ( ( treeResult = treeScanner.next() ) != null )
+ {
+ treeHTable.delete( new Delete( treeResult.getRow() ) );
+ }
+ }
+
+ HBaseStore store = new HBaseStore();
+ store.setSuffixDn( suffixDn.getName() );
+ store.setCacheSize( 100 );
+ store.setTablePrefix( TABLE_PREFIX );
+ store.init( schemaManager );
+
+ masterTable = new HBaseMasterTable( store );
+ }
+
+
+ @Test
+ public void first() throws Exception
+ {
+ DefaultServerEntry entry = buildContextEntry();
+ masterTable.add( entry );
+ }
+
+
+ @Test
+ public void testEmpty() throws Exception
+ {
+ assertNull( masterTable.fetchId( new DN( "a=b" ) ) );
+ }
+
+
+ @Test
+ public void testAdd() throws Exception
+ {
+ // add context entry
+ DefaultServerEntry entry = buildContextEntry();
+ masterTable.add( entry );
+
+ HTable masterHTable = new HTable( MASTER_TABLE_NAME );
+ HTable treeHTable = new HTable( TREE_TABLE_NAME );
+
+ // check state in HBase table 'master'
+ Get masterGet = new Get( Bytes.toBytes( CONTEXT_ENTRY_ID.toString() ) );
+ assertTrue( masterHTable.exists( masterGet ) );
+ Result masterResult = masterHTable.get( masterGet );
+
+ assertEquals( "00000000-0000-0000-0000-000000000001", Bytes.toString( masterResult.getRow() ) );
+
+ NavigableMap<byte[], byte[]> treeInfoMap = masterResult.getFamilyMap( Bytes.toBytes( "treeInfo" ) );
+ assertNotNull( treeInfoMap );
+ assertEquals( 4, treeInfoMap.size() );
+ assertEquals( "e", Bytes.toString( treeInfoMap.get( Bytes.toBytes( "status" ) ) ) );
+ assertEquals( "o=Good Times Co.", Bytes.toString( treeInfoMap.get( Bytes.toBytes( "upRdn" ) ) ) );
+ assertEquals( "2.5.4.10=good times co.", Bytes.toString( treeInfoMap.get( Bytes.toBytes( "normRdn" ) ) ) );
+ assertEquals( "00000000-0000-0000-0000-000000000000", Bytes.toString( treeInfoMap.get( Bytes
+ .toBytes( "parentId" ) ) ) );
+
+ NavigableMap<byte[], byte[]> upAttributesMap = masterResult.getFamilyMap( Bytes.toBytes( "upAttributes" ) );
+ assertNotNull( upAttributesMap );
+ assertEquals( 6, upAttributesMap.size() );
+ assertEquals( "Good Times Co.", Bytes.toString( upAttributesMap.get( Bytes.add( Bytes.toBytes( "o" ), Bytes
+ .toBytes( 0 ) ) ) ) );
+ assertEquals( "organization", Bytes.toString( upAttributesMap.get( Bytes.add( Bytes.toBytes( "objectClass" ),
+ Bytes.toBytes( 0 ) ) ) ) );
+ assertEquals( "1", Bytes.toString( upAttributesMap.get( Bytes.add( Bytes.toBytes( "postalCode" ), Bytes
+ .toBytes( 0 ) ) ) ) );
+ assertEquals( "1", Bytes.toString( upAttributesMap.get( Bytes.add( Bytes.toBytes( "postOfficeBox" ), Bytes
+ .toBytes( 0 ) ) ) ) );
+ assertEquals( entry.get( "entryUUID" ).get().getString(), Bytes.toString( upAttributesMap.get( Bytes.add( Bytes
+ .toBytes( "entryUUID" ), Bytes.toBytes( 0 ) ) ) ) );
+ assertEquals( entry.get( "entryCsn" ).get().getString(), Bytes.toString( upAttributesMap.get( Bytes.add( Bytes
+ .toBytes( "entryCsn" ), Bytes.toBytes( 0 ) ) ) ) );
+
+ // check state in HBase table 'tree'
+ Get treeGet = new Get( Bytes.add( Bytes.toBytes( "00000000-0000-0000-0000-000000000000" ),
+ Bytes.toBytes( "," ), Bytes.toBytes( "2.5.4.10=good times co." ) ) );
+ Result treeResult = treeHTable.get( treeGet );
+
+ //assertEquals( "", Bytes.toLong( treeResult.getRow() ));
+
+ NavigableMap<byte[], byte[]> infoMap = treeResult.getFamilyMap( Bytes.toBytes( "treeInfo" ) );
+ assertNotNull( infoMap );
+ assertEquals( 2, infoMap.size() );
+ assertEquals( "e", Bytes.toString( infoMap.get( Bytes.toBytes( "status" ) ) ) );
+ assertEquals( "00000000-0000-0000-0000-000000000001", Bytes.toString( infoMap.get( Bytes.toBytes( "id" ) ) ) );
+ // no oneLevelCount and subLevelCount column yet
+
+ NavigableMap<byte[], byte[]> normAttributesMap = treeResult.getFamilyMap( Bytes.toBytes( "normAttributes" ) );
+ assertNotNull( normAttributesMap );
+ assertEquals( 6, normAttributesMap.size() );
+
+ assertEquals( 0, Bytes.toInt( normAttributesMap.get( Bytes.toBytes( "2.5.4.0=organization" ) ) ) );
+ assertEquals( 0, Bytes.toInt( normAttributesMap.get( Bytes.toBytes( "2.5.4.10=good times co." ) ) ) );
+ assertEquals( 0, Bytes.toInt( normAttributesMap.get( Bytes.toBytes( "2.5.4.17=1" ) ) ) );
+ assertEquals( 0, Bytes.toInt( normAttributesMap.get( Bytes.toBytes( "2.5.4.18=1" ) ) ) );
+ // ...
+
+ // add second entry
+ entry = buildOuSalesEntry();
+ masterTable.add( entry );
+
+ // check in HBase tables
+ masterGet = new Get( Bytes.toBytes( OU_SALES_ID.toString() ) );
+ assertTrue( masterHTable.exists( masterGet ) );
+ treeGet = new Get( Bytes.add( Bytes.toBytes( CONTEXT_ENTRY_ID.toString() ), Bytes.toBytes( "," ), Bytes
+ .toBytes( "2.5.4.11=sales \u65E5\u672C" ) ) );
+ assertTrue( treeHTable.exists( treeGet ) );
+ }
+
+
+ @Test
+ public void testDelete() throws Exception
+ {
+ // add context entry
+ DefaultServerEntry entry = buildContextEntry();
+ masterTable.add( entry );
+
+ // add second entry
+ entry = buildOuSalesEntry();
+ masterTable.add( entry );
+
+ HTable masterHTable = new HTable( MASTER_TABLE_NAME );
+ HTable treeHTable = new HTable( TREE_TABLE_NAME );
+
+ // delete second entry
+ masterTable.delete( OU_SALES_ID, entry );
+ Get masterGet = new Get( Bytes.toBytes( OU_SALES_ID.toString() ) );
+ assertTrue( masterHTable.exists( masterGet ) );
+ Get treeGet = new Get( Bytes.add( Bytes.toBytes( CONTEXT_ENTRY_ID.toString() ), Bytes.toBytes( "," ), Bytes
+ .toBytes( "2.5.4.11=sales \u65E5\u672C" ) ) );
+ assertTrue( treeHTable.exists( treeGet ) );
+ assertTrue( masterHTable.exists( new Get( Bytes.toBytes( CONTEXT_ENTRY_ID.toString() ) ) ) );
+
+ // delete context entry
+ masterTable.delete( CONTEXT_ENTRY_ID, entry );
+ masterGet = new Get( Bytes.toBytes( CONTEXT_ENTRY_ID.toString() ) );
+ assertTrue( masterHTable.exists( masterGet ) );
+ treeGet = new Get( Bytes.add( Bytes.toBytes( "00000000-0000-0000-0000-000000000000" ), Bytes.toBytes( "," ), Bytes
+ .toBytes( "2.5.4.10=good times co." ) ) );
+ assertTrue( treeHTable.exists( treeGet ) );
+ }
+
+
+ @Test
+ public void testModify() throws Exception
+ {
+ // add context entry
+ DefaultServerEntry entry = buildContextEntry();
+ masterTable.add( entry );
+
+ // modify some attributes
+ entry.get( "objectClass" ).add( "top" );
+ entry.removeAttributes( "postalCode" );
+ entry.get( "postOfficeBox" ).put( "2" );
+ masterTable.modify( CONTEXT_ENTRY_ID, entry );
+
+ // check state in HBase table 'master'
+ HTable masterHTable = new HTable( MASTER_TABLE_NAME );
+ Get masterGet = new Get( Bytes.toBytes( CONTEXT_ENTRY_ID.toString() ) );
+ assertTrue( masterHTable.exists( masterGet ) );
+ Result masterResult = masterHTable.get( masterGet );
+
+ assertEquals( "00000000-0000-0000-0000-000000000001", Bytes.toString( masterResult.getRow() ) );
+
+ NavigableMap<byte[], byte[]> treeInfoMap = masterResult.getFamilyMap( Bytes.toBytes( "treeInfo" ) );
+ assertNotNull( treeInfoMap );
+ assertEquals( 4, treeInfoMap.size() );
+ assertEquals( "e", Bytes.toString( treeInfoMap.get( Bytes.toBytes( "status" ) ) ) );
+ assertEquals( "o=Good Times Co.", Bytes.toString( treeInfoMap.get( Bytes.toBytes( "upRdn" ) ) ) );
+ assertEquals( "2.5.4.10=good times co.", Bytes.toString( treeInfoMap.get( Bytes.toBytes( "normRdn" ) ) ) );
+ assertEquals( "00000000-0000-0000-0000-000000000000", Bytes.toString( treeInfoMap.get( Bytes
+ .toBytes( "parentId" ) ) ) );
+
+ NavigableMap<byte[], byte[]> upAttributesMap = masterResult.getFamilyMap( Bytes.toBytes( "upAttributes" ) );
+ assertNotNull( upAttributesMap );
+ assertEquals( 7, upAttributesMap.size() );
+ assertEquals( "\u0000", Bytes.toString( upAttributesMap.get( Bytes.add( Bytes.toBytes( "postalCode" ), Bytes
+ .toBytes( 0 ) ) ) ) );
+ assertEquals( "Good Times Co.", Bytes.toString( upAttributesMap.get( Bytes.add( Bytes.toBytes( "o" ), Bytes
+ .toBytes( 0 ) ) ) ) );
+ assertEquals( "organization", Bytes.toString( upAttributesMap.get( Bytes.add( Bytes.toBytes( "objectClass" ),
+ Bytes.toBytes( 0 ) ) ) ) );
+ assertEquals( "top", Bytes.toString( upAttributesMap.get( Bytes.add( Bytes.toBytes( "objectClass" ), Bytes
+ .toBytes( 1 ) ) ) ) );
+ assertEquals( "2", Bytes.toString( upAttributesMap.get( Bytes.add( Bytes.toBytes( "postOfficeBox" ), Bytes
+ .toBytes( 0 ) ) ) ) );
+ assertEquals( entry.get( "entryUUID" ).get().getString(), Bytes.toString( upAttributesMap.get( Bytes.add( Bytes
+ .toBytes( "entryUUID" ), Bytes.toBytes( 0 ) ) ) ) );
+ assertEquals( entry.get( "entryCsn" ).get().getString(), Bytes.toString( upAttributesMap.get( Bytes.add( Bytes
+ .toBytes( "entryCsn" ), Bytes.toBytes( 0 ) ) ) ) );
+ }
+
+
+ @Test
+ public void testRename() throws Exception
+ {
+ // add context entry
+ DefaultServerEntry entry = buildContextEntry();
+ masterTable.add( entry );
+
+ // add second entry
+ entry = buildOuSalesEntry();
+ masterTable.add( entry );
+
+ // rename
+ RDN rdn = new RDN("ou=SaLeS-US");
+ rdn.normalize( schemaManager.getNormalizerMapping() );
+ masterTable.move( OU_SALES_ID, null, rdn );
+
+ HTable masterHTable = new HTable( MASTER_TABLE_NAME );
+ HTable treeHTable = new HTable( TREE_TABLE_NAME );
+
+ // check in HBase tables
+ Get masterGet = new Get( Bytes.toBytes( OU_SALES_ID.toString() ) );
+ assertTrue( masterHTable.exists( masterGet ) );
+ Get treeGet = new Get( Bytes.add( Bytes.toBytes( CONTEXT_ENTRY_ID.toString() ), Bytes.toBytes( "," ), Bytes
+ .toBytes( "2.5.4.11=sales-us" ) ) );
+ assertTrue( treeHTable.exists( treeGet ) );
+
+ }
+
+ private void dump() throws Exception
+ {
+ HTable masterHTable = new HTable( MASTER_TABLE_NAME );
+ ResultScanner masterScanner = masterHTable.getScanner( new Scan() );
+ Result masterResult;
+ while ( ( masterResult = masterScanner.next() ) != null )
+ {
+ System.out.println( "master: " + Utils.getPrintableString( masterResult.getRow() ) );
+ }
+
+ HTable treeHTable = new HTable( TREE_TABLE_NAME );
+ ResultScanner treeScanner = treeHTable.getScanner( new Scan() );
+ Result treeResult;
+ while ( ( treeResult = treeScanner.next() ) != null )
+ {
+ System.out.println( "tree: " + Utils.getPrintableString( treeResult.getRow() ) );
+ }
+ }
+
+ @Test
+ public void testFetch() throws Exception
+ {
+ // add first entry
+ DefaultServerEntry contextEntry = buildContextEntry();
+ masterTable.add( contextEntry );
+
+ // fetch norm attributes
+ long count1 = HBaseTableHelper.RPC_COUNT;
+ NavigableMap<byte[], byte[]> normAttributesMap = masterTable.fetchNormAttributes( CONTEXT_ENTRY_ID );
+ assertNotNull( normAttributesMap );
+ assertEquals( 6, normAttributesMap.size() );
+ assertEquals( 0, Bytes.toInt( normAttributesMap.get( Bytes.toBytes( "2.5.4.0=organization" ) ) ) );
+ assertEquals( 0, Bytes.toInt( normAttributesMap.get( Bytes.toBytes( "2.5.4.10=good times co." ) ) ) );
+ assertEquals( 0, Bytes.toInt( normAttributesMap.get( Bytes.toBytes( "2.5.4.17=1" ) ) ) );
+ assertEquals( 0, Bytes.toInt( normAttributesMap.get( Bytes.toBytes( "2.5.4.18=1" ) ) ) );
+ // ...
+ long count2 = HBaseTableHelper.RPC_COUNT;
+ assertEquals( 2, count2 - count1 ); // treeInfo from master table + normAttributes from tree table
+
+ // fetch again, this time we hit the cache
+ long count5 = HBaseTableHelper.RPC_COUNT;
+ normAttributesMap = masterTable.fetchNormAttributes( CONTEXT_ENTRY_ID );
+ long count6 = HBaseTableHelper.RPC_COUNT;
+ assertEquals( 1, count6 - count5 ); // normAttributes from tree table
+ }
+
+
+ @Test
+ public void testFetchId() throws Exception
+ {
+ // add first entry
+ DefaultServerEntry contextEntry = buildContextEntry();
+ masterTable.add( contextEntry );
+
+ // fetch ID
+ long count1 = HBaseTableHelper.RPC_COUNT;
+ assertEquals( "00000000-0000-0000-0000-000000000001", masterTable.fetchId( contextEntry.getDn() ).toString() );
+ long count2 = HBaseTableHelper.RPC_COUNT;
+ assertEquals( 1, count2 - count1 );
+
+ // add second entry
+ DefaultServerEntry ouSalesEntry = buildOuSalesEntry();
+ masterTable.add( ouSalesEntry );
+
+ // fetch entry ID
+ long count3 = HBaseTableHelper.RPC_COUNT;
+ assertEquals( "00000000-0000-0000-0000-000000000002", masterTable.fetchId( ouSalesEntry.getDn() ).toString() );
+ long count4 = HBaseTableHelper.RPC_COUNT;
+ assertEquals( 1, count4 - count3 );
+
+ // fetch IDs again, this time we hit the cache
+ long count5 = HBaseTableHelper.RPC_COUNT;
+ assertEquals( "00000000-0000-0000-0000-000000000001", masterTable.fetchId( contextEntry.getDn() ).toString() );
+ assertEquals( "00000000-0000-0000-0000-000000000002", masterTable.fetchId( ouSalesEntry.getDn() ).toString() );
+ long count6 = HBaseTableHelper.RPC_COUNT;
+ assertEquals( 0, count6 - count5 );
+
+ // check result after deleting the entry
+ masterTable.delete( OU_SALES_ID, ouSalesEntry );
+ assertNull( masterTable.fetchId( ouSalesEntry.getDn() ) );
+
+ // fetch entry IDs of non-existing entries
+ assertNull( masterTable.fetchId( null ) );
+ assertNull( masterTable.fetchId( new DN( "a=b" ) ) );
+ }
+
+
+ /**
+  * Checks parent-ID resolution: one RPC per first lookup, zero RPCs for
+  * cached lookups, and null results for deleted or unknown entries.
+  */
+ @Test
+ public void testFetchParentId() throws Exception
+ {
+ // store the context entry first
+ DefaultServerEntry contextEntry = buildContextEntry();
+ masterTable.add( contextEntry );
+
+ // resolving the context entry's parent must trigger exactly one RPC
+ long rpcBefore = HBaseTableHelper.RPC_COUNT;
+ assertEquals( "00000000-0000-0000-0000-000000000000", masterTable.fetchParentId( CONTEXT_ENTRY_ID ).toString() );
+ long rpcAfter = HBaseTableHelper.RPC_COUNT;
+ assertEquals( 1, rpcAfter - rpcBefore );
+
+ // store a child entry below the context entry
+ DefaultServerEntry ouSalesEntry = buildOuSalesEntry();
+ masterTable.add( ouSalesEntry );
+
+ // resolving the child's parent must trigger exactly one RPC as well
+ rpcBefore = HBaseTableHelper.RPC_COUNT;
+ assertEquals( "00000000-0000-0000-0000-000000000001", masterTable.fetchParentId( OU_SALES_ID ).toString() );
+ rpcAfter = HBaseTableHelper.RPC_COUNT;
+ assertEquals( 1, rpcAfter - rpcBefore );
+
+ // repeated lookups are served from the cache, so no further RPCs occur
+ rpcBefore = HBaseTableHelper.RPC_COUNT;
+ assertEquals( "00000000-0000-0000-0000-000000000000", masterTable.fetchParentId( CONTEXT_ENTRY_ID ).toString() );
+ assertEquals( "00000000-0000-0000-0000-000000000001", masterTable.fetchParentId( OU_SALES_ID ).toString() );
+ rpcAfter = HBaseTableHelper.RPC_COUNT;
+ assertEquals( 0, rpcAfter - rpcBefore );
+
+ // after deleting the child its parent ID must no longer be resolvable
+ masterTable.delete( OU_SALES_ID, ouSalesEntry );
+ assertNull( masterTable.fetchParentId( OU_SALES_ID ) );
+
+ // null, the all-zero UUID, and a random unknown ID all resolve to null
+ assertNull( masterTable.fetchParentId( null ) );
+ assertNull( masterTable.fetchParentId( UUID.fromString( "00000000-0000-0000-0000-000000000000" ) ) );
+ assertNull( masterTable.fetchParentId( UUID.randomUUID() ) );
+ }
+
+
+ /**
+  * Checks entry fetching: repeated fetches of the same ID cost a single
+  * RPC (cache hits afterwards), DNs round-trip correctly, and deleted or
+  * unknown IDs yield null.
+  */
+ @Test
+ public void testFetchEntry() throws Exception
+ {
+ // store the context entry
+ DefaultServerEntry contextEntry = buildContextEntry();
+ masterTable.add( contextEntry );
+
+ // three consecutive fetches of the same entry cost one RPC in total
+ long rpcBefore = HBaseTableHelper.RPC_COUNT;
+ assertNotNull( masterTable.fetchEntry( CONTEXT_ENTRY_ID ) );
+ assertEquals( "o=Good Times Co.", masterTable.fetchEntry( CONTEXT_ENTRY_ID ).getDn().getName() );
+ assertEquals( contextEntry, masterTable.fetchEntry( CONTEXT_ENTRY_ID ) );
+ long rpcAfter = HBaseTableHelper.RPC_COUNT;
+ assertEquals( 1, rpcAfter - rpcBefore );
+
+ // store a second entry below the context entry
+ DefaultServerEntry ouSalesEntry = buildOuSalesEntry();
+ masterTable.add( ouSalesEntry );
+
+ // verify user-provided and normalized DN of the second entry; again one RPC in total
+ rpcBefore = HBaseTableHelper.RPC_COUNT;
+ assertNotNull( masterTable.fetchEntry( OU_SALES_ID ) );
+ assertEquals( "ou=Sales \\E6\\97\\A5\\E6\\9C\\AC,o=Good Times Co.", masterTable.fetchEntry( OU_SALES_ID )
+ .getDn().getName() );
+ assertEquals( "2.5.4.11=sales \u65E5\u672C,2.5.4.10=good times co.", masterTable.fetchEntry( OU_SALES_ID )
+ .getDn().getNormName() );
+ assertEquals( ouSalesEntry, masterTable.fetchEntry( OU_SALES_ID ) );
+ rpcAfter = HBaseTableHelper.RPC_COUNT;
+ assertEquals( 1, rpcAfter - rpcBefore );
+
+ // subsequent fetches are served from the cache without any RPC
+ rpcBefore = HBaseTableHelper.RPC_COUNT;
+ assertNotNull( masterTable.fetchEntry( CONTEXT_ENTRY_ID ) );
+ assertNotNull( masterTable.fetchEntry( OU_SALES_ID ) );
+ rpcAfter = HBaseTableHelper.RPC_COUNT;
+ assertEquals( 0, rpcAfter - rpcBefore );
+
+ // a deleted entry must no longer be fetchable
+ masterTable.delete( OU_SALES_ID, ouSalesEntry );
+ assertNull( masterTable.fetchEntry( OU_SALES_ID ) );
+
+ // null, the all-zero UUID, and a random unknown ID all yield null
+ assertNull( masterTable.fetchEntry( null ) );
+ assertNull( masterTable.fetchEntry( UUID.fromString( "00000000-0000-0000-0000-000000000000" ) ) );
+ assertNull( masterTable.fetchEntry( UUID.randomUUID() ) );
+ }
+
+
+ /**
+  * Verifies the one-level (direct children) and sub-level (all descendants)
+  * counters: adding a child increments the counters of its ancestors,
+  * deleting it decrements them again, and unknown IDs report zero.
+  */
+ @Test
+ public void testOneAndSubLevelCount() throws Exception
+ {
+ // add context entry
+ DefaultServerEntry contextEntry = buildContextEntry();
+ masterTable.add( contextEntry );
+
+ // a freshly added leaf entry has no children at all
+ assertEquals( 0, masterTable.getOneLevelCount( CONTEXT_ENTRY_ID ) );
+ assertEquals( 0, masterTable.getSubLevelCount( CONTEXT_ENTRY_ID ) );
+
+ // add ou=Sales entry below the context entry
+ DefaultServerEntry ouSalesEntry = buildOuSalesEntry();
+ masterTable.add( ouSalesEntry );
+
+ // the new entry itself is a leaf
+ assertEquals( 0, masterTable.getOneLevelCount( OU_SALES_ID ) );
+ assertEquals( 0, masterTable.getSubLevelCount( OU_SALES_ID ) );
+
+ // the context entry now has one direct child and one descendant
+ assertEquals( 1, masterTable.getOneLevelCount( CONTEXT_ENTRY_ID ) );
+ assertEquals( 1, masterTable.getSubLevelCount( CONTEXT_ENTRY_ID ) );
+
+ // add cn=Johnny Walker entry below ou=Sales
+ DefaultServerEntry cnJohnnyWalkerEntry = buildCnJohnnyWalkerEntry();
+ masterTable.add( cnJohnnyWalkerEntry );
+
+ // the new entry itself is a leaf
+ assertEquals( 0, masterTable.getOneLevelCount( CN_JOHNNY_WALKER_UUID ) );
+ assertEquals( 0, masterTable.getSubLevelCount( CN_JOHNNY_WALKER_UUID ) );
+
+ // ou=Sales gained one direct child / one descendant
+ assertEquals( 1, masterTable.getOneLevelCount( OU_SALES_ID ) );
+ assertEquals( 1, masterTable.getSubLevelCount( OU_SALES_ID ) );
+
+ // the context entry still has one direct child but now two descendants
+ assertEquals( 1, masterTable.getOneLevelCount( CONTEXT_ENTRY_ID ) );
+ assertEquals( 2, masterTable.getSubLevelCount( CONTEXT_ENTRY_ID ) );
+
+ // delete cn=Johnny Walker entry
+ masterTable.delete( CN_JOHNNY_WALKER_UUID, cnJohnnyWalkerEntry );
+
+ // ou=Sales counters drop back to zero
+ assertEquals( 0, masterTable.getOneLevelCount( OU_SALES_ID ) );
+ assertEquals( 0, masterTable.getSubLevelCount( OU_SALES_ID ) );
+
+ // the context entry is back to one direct child / one descendant
+ assertEquals( 1, masterTable.getOneLevelCount( CONTEXT_ENTRY_ID ) );
+ assertEquals( 1, masterTable.getSubLevelCount( CONTEXT_ENTRY_ID ) );
+
+ // delete ou=Sales entry
+ masterTable.delete( OU_SALES_ID, ouSalesEntry );
+
+ // the context entry is a leaf again
+ assertEquals( 0, masterTable.getOneLevelCount( CONTEXT_ENTRY_ID ) );
+ assertEquals( 0, masterTable.getSubLevelCount( CONTEXT_ENTRY_ID ) );
+
+ // null, the all-zero UUID, and a random unknown ID all count zero
+ assertEquals( 0, masterTable.getOneLevelCount( null ) );
+ assertEquals( 0, masterTable.getSubLevelCount( null ) );
+ assertEquals( 0, masterTable.getOneLevelCount( UUID.fromString( "00000000-0000-0000-0000-000000000000" ) ) );
+ assertEquals( 0, masterTable.getSubLevelCount( UUID.fromString( "00000000-0000-0000-0000-000000000000" ) ) );
+ assertEquals( 0, masterTable.getOneLevelCount( UUID.randomUUID() ) );
+ assertEquals( 0, masterTable.getSubLevelCount( UUID.randomUUID() ) );
+ }
+
+
+ /**
+  * Intentionally empty. NOTE(review): the purpose is not visible from this
+  * file — presumably a marker test meant to run last (e.g. to trigger shared
+  * cluster teardown). Confirm the intent and document it here.
+  */
+ @Test
+ public void last() throws Exception
+ {
+ }
+
+}