Posted to commits@directory.apache.org by se...@apache.org on 2010/03/19 18:03:27 UTC

svn commit: r925331 - in /directory/sandbox/seelmann/hbase-partition: ./ src/main/java/org/apache/directory/server/core/partition/hbase/ src/main/java/org/apache/directory/server/core/partition/hbase/cursor/ src/main/java/org/apache/directory/server/co...

Author: seelmann
Date: Fri Mar 19 17:03:26 2010
New Revision: 925331

URL: http://svn.apache.org/viewvc?rev=925331&view=rev
Log:
o Implemented rename and move operations
o Improved modify operation
o Changed delete operation to only set a deleted flag
o Added instance name to table prefix
o Added filter expansion for server-side filtering
o Moved tests into separate module
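
A minimal sketch (not part of this commit) of the new delete behavior: instead of issuing an HBase Delete, the index tables now overwrite a status cell, and scans skip rows whose status is not EXISTS. The constants (INFO_FAMILY, STATUS_QUALIFIER, EXISTS, DELETED) are those introduced in HBaseIndexTable below; an open org.apache.hadoop.hbase.client.HTable handle named indexTable and a computed row key named row are assumptions for illustration only.

    // drop( value, id ): mark the index row as deleted instead of removing it
    Put drop = new Put( row );
    drop.add( HBaseIndexTable.INFO_FAMILY, HBaseIndexTable.STATUS_QUALIFIER, HBaseIndexTable.DELETED );
    indexTable.put( drop );

    // add( value, id ): re-adding the same value simply flips the flag back to EXISTS
    Put added = new Put( row );
    added.add( HBaseIndexTable.INFO_FAMILY, HBaseIndexTable.STATUS_QUALIFIER, HBaseIndexTable.EXISTS );
    indexTable.put( added );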


Removed:
    directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseClusterTestCaseAdapter.java
    directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseDistributedRunner.java
    directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/HBaseEmbeddedRunner.java
    directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/JdbmRunner.java
    directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/it/AbstractHBasePartitionIT.java
    directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/it/HBasePartitionIT.java
    directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/it/HBasePartitionPerformanceIT.java
    directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/it/HBaseRunner.java
    directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/GetPerformanceEvaluation.java
    directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/LdifImportAndIndexIT.java
    directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/RemoteLdifImport.java
    directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/it/mapreduce/RemoteRunner.java
    directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/table/AbstractHBaseTableTest.java
    directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/table/HBaseIndexTableTest.java
    directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/table/HBaseMasterTableTest.java
    directory/sandbox/seelmann/hbase-partition/src/test/java/org/apache/directory/server/core/partition/hbase/table/HBasePresenceIndexTableTest.java
Modified:
    directory/sandbox/seelmann/hbase-partition/pom.xml
    directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/HBaseDistributedPartition.java
    directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/HBaseStore.java
    directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBaseNdnIndexCursor.java
    directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBaseOneLevelIndexCursor.java
    directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBasePresenceIndexCursor.java
    directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBaseSubLevelIndexCursor.java
    directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBaseUserColumnIndexCursor.java
    directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBaseUserRowIndexCursor.java
    directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/index/HBaseUserRowIndex.java
    directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseColumnIndexTable.java
    directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseIndexTable.java
    directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseMasterTable.java
    directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBasePresenceIndexTable.java
    directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseRowIndexTable.java
    directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseTableHelper.java

Modified: directory/sandbox/seelmann/hbase-partition/pom.xml
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/pom.xml?rev=925331&r1=925330&r2=925331&view=diff
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/pom.xml (original)
+++ directory/sandbox/seelmann/hbase-partition/pom.xml Fri Mar 19 17:03:26 2010
@@ -57,14 +57,6 @@
       <version>${pom.version}</version>
     </dependency>
 
-    <!-- ApacheDS integration test framework -->
-    <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-server-integ</artifactId>
-      <version>${pom.version}</version>
-      <scope>test</scope>
-    </dependency>
-
     <!-- HBase dependencies -->
     <!-- 
       Unfortunately HBase artifacts are not yet available in public maven repo.

Modified: directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/HBaseDistributedPartition.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/HBaseDistributedPartition.java?rev=925331&r1=925330&r2=925331&view=diff
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/HBaseDistributedPartition.java (original)
+++ directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/HBaseDistributedPartition.java Fri Mar 19 17:03:26 2010
@@ -88,7 +88,8 @@ public class HBaseDistributedPartition e
         store.setSuffixDn( getSuffixDn().normalize( oidsMap ).getNormName() );
         store.setCacheSize( cacheSize );
         store.setName( id );
-        ( ( HBaseStore ) store ).setTablePrefix( "apacheds" + "_" + id + "_" );
+        String directoryServiceInstanceName = getPartitionDir().getParentFile().getName();
+        ( ( HBaseStore ) store ).setTablePrefix( "apacheds" + "_" + directoryServiceInstanceName + "_" + id + "_" );
 
         Set<Index<?, ServerEntry, UUID>> userIndices = new HashSet<Index<?, ServerEntry, UUID>>();
 

Modified: directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/HBaseStore.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/HBaseStore.java?rev=925331&r1=925330&r2=925331&view=diff
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/HBaseStore.java (original)
+++ directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/HBaseStore.java Fri Mar 19 17:03:26 2010
@@ -21,6 +21,7 @@ package org.apache.directory.server.core
 
 
 import java.io.File;
+import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -31,8 +32,10 @@ import java.util.UUID;
 
 import javax.naming.NamingException;
 
+import org.apache.directory.server.core.entry.DefaultServerAttribute;
 import org.apache.directory.server.core.entry.ServerAttribute;
 import org.apache.directory.server.core.entry.ServerEntry;
+import org.apache.directory.server.core.entry.ServerModification;
 import org.apache.directory.server.core.partition.hbase.index.HBaseAliasIndex;
 import org.apache.directory.server.core.partition.hbase.index.HBaseNdnIndex;
 import org.apache.directory.server.core.partition.hbase.index.HBaseOneAliasIndex;
@@ -51,6 +54,7 @@ import org.apache.directory.shared.ldap.
 import org.apache.directory.shared.ldap.entry.Modification;
 import org.apache.directory.shared.ldap.entry.ModificationOperation;
 import org.apache.directory.shared.ldap.entry.Value;
+import org.apache.directory.shared.ldap.name.AVA;
 import org.apache.directory.shared.ldap.name.DN;
 import org.apache.directory.shared.ldap.name.RDN;
 import org.apache.directory.shared.ldap.schema.AttributeType;
@@ -108,8 +112,7 @@ public class HBaseStore implements Store
         }
 
         this.schemaManager = schemaManager;
-        this.suffixDn = new DN( suffix )
-            .normalize( schemaManager.getAttributeTypeRegistry().getNormalizerMapping() );
+        this.suffixDn = new DN( suffix ).normalize( schemaManager.getAttributeTypeRegistry().getNormalizerMapping() );
 
         // setup system indices
         if ( presenceIndex == null )
@@ -408,6 +411,12 @@ public class HBaseStore implements Store
     }
 
 
+    public Index<?, ServerEntry, UUID> getIndex( String id ) throws IndexNotFoundException
+    {
+        return getUserIndex( id );
+    }
+
+
     public HBaseUserIndex<HBaseIndexTable> getUserIndex( String id ) throws IndexNotFoundException
     {
         id = getAttributeTypeOid( id );
@@ -448,7 +457,14 @@ public class HBaseStore implements Store
 
     public boolean hasSystemIndexOn( String id ) throws Exception
     {
-        throw new UnsupportedOperationException();
+        //throw new UnsupportedOperationException();
+        return false;
+    }
+
+
+    public boolean hasIndexOn( String id ) throws Exception
+    {
+        return hasUserIndexOn( id );
     }
 
 
@@ -548,7 +564,7 @@ public class HBaseStore implements Store
             }
         }
 
-        // write modified entry to id2entry table
+        // write modified entry to master and tree table
         getMasterTable().modify( id, entry );
 
         // write new indices
@@ -575,21 +591,50 @@ public class HBaseStore implements Store
     }
 
 
-    public void move( DN arg0, DN arg1 ) throws Exception
+    public void move( DN oldChildDn, DN newParentDn ) throws Exception
     {
-        throw new UnsupportedOperationException();
+        move( oldChildDn, newParentDn, null, false );
     }
 
 
-    public void move( DN arg0, DN arg1, RDN arg2, boolean arg3 ) throws Exception
+    public void move( DN dn, DN newParentDn, RDN newRdn, boolean deleteOldRdn ) throws Exception
     {
-        throw new UnsupportedOperationException();
+        UUID id = getEntryId( dn.getNormName() );
+
+        if ( newRdn != null )
+        {
+            // apply attribute modifications
+            List<Modification> mods = new ArrayList<Modification>();
+            if ( deleteOldRdn )
+            {
+                for ( AVA ava : dn.getRdn() )
+                {
+                    AttributeType at = schemaManager.lookupAttributeTypeRegistry( ava.getUpType() );
+                    DefaultServerAttribute attribute = new DefaultServerAttribute( ava.getUpType(), at, ava
+                        .getUpValue() );
+                    Modification mod = new ServerModification( ModificationOperation.REMOVE_ATTRIBUTE, attribute );
+                    mods.add( mod );
+                }
+            }
+            for ( AVA ava : newRdn )
+            {
+                AttributeType at = schemaManager.lookupAttributeTypeRegistry( ava.getUpType() );
+                DefaultServerAttribute attribute = new DefaultServerAttribute( ava.getUpType(), at, ava.getUpValue() );
+                Modification mod = new ServerModification( ModificationOperation.ADD_ATTRIBUTE, attribute );
+                mods.add( mod );
+            }
+            modify( dn, mods );
+        }
+
+        
+        UUID newParentId = newParentDn != null ? getEntryId( newParentDn.getNormName() ) : null;
+        getMasterTable().move( id, newParentId, newRdn );
     }
 
 
-    public void rename( DN arg0, RDN arg1, boolean arg2 ) throws Exception
+    public void rename( DN dn, RDN newRdn, boolean deleteOldRdn ) throws Exception
     {
-        throw new UnsupportedOperationException();
+        move( dn, null, newRdn, deleteOldRdn );
     }
 
 
@@ -725,6 +770,7 @@ public class HBaseStore implements Store
 
     public void setTablePrefix( String tablePrefix )
     {
+        System.out.println("tablePrefix="+tablePrefix);
         this.tablePrefix = tablePrefix;
     }
 
@@ -745,10 +791,11 @@ public class HBaseStore implements Store
     {
         return configuration;
     }
-    
+
 
     public UUID getDefaultId() throws Exception
     {
         return getEntryId( getSuffixDn() );
     }
+
 }

Modified: directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBaseNdnIndexCursor.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBaseNdnIndexCursor.java?rev=925331&r1=925330&r2=925331&view=diff
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBaseNdnIndexCursor.java (original)
+++ directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBaseNdnIndexCursor.java Fri Mar 19 17:03:26 2010
@@ -29,6 +29,8 @@ import org.apache.directory.server.core.
 import org.apache.directory.server.xdbm.ForwardIndexEntry;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 
 
 /**
@@ -53,6 +55,8 @@ public class HBaseNdnIndexCursor extends
         if ( scanner == null )
         {
             Scan s = new Scan();
+            s.setFilter( new SingleColumnValueFilter( HBaseMasterTable.TREE_INFO_FAMILY,
+                HBaseMasterTable.STATUS_QUALIFIER, CompareOp.EQUAL, HBaseMasterTable.EXISTS ) );
             s.addFamily( HBaseMasterTable.TREE_INFO_FAMILY );
             //s.addFamily( HBaseId2entryTable.UP_ATTRIBUTES_FAMILY );
             scanner = store.getMasterTable().getScanner( s );

Modified: directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBaseOneLevelIndexCursor.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBaseOneLevelIndexCursor.java?rev=925331&r1=925330&r2=925331&view=diff
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBaseOneLevelIndexCursor.java (original)
+++ directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBaseOneLevelIndexCursor.java Fri Mar 19 17:03:26 2010
@@ -30,6 +30,11 @@ import org.apache.directory.server.xdbm.
 import org.apache.directory.shared.ldap.filter.ExprNode;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.FilterList;
+import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
+import org.apache.hadoop.hbase.filter.FilterList.Operator;
 
 
 /**
@@ -45,7 +50,7 @@ public class HBaseOneLevelIndexCursor ex
     private UUID baseId;
 
     /** The filter to apply while scanning. */
-    private ExprNode filter;
+    private ExprNode exprNode;
 
 
     public HBaseOneLevelIndexCursor( UUID baseId, HBaseStore store )
@@ -55,10 +60,10 @@ public class HBaseOneLevelIndexCursor ex
     }
 
 
-    public HBaseOneLevelIndexCursor( UUID baseId, ExprNode filter, HBaseStore store )
+    public HBaseOneLevelIndexCursor( UUID baseId, ExprNode exprNode, HBaseStore store )
     {
         this( baseId, store );
-        this.filter = filter;
+        this.exprNode = exprNode;
     }
 
 
@@ -72,7 +77,19 @@ public class HBaseOneLevelIndexCursor ex
             Scan s = new Scan( start, stop );
             s.addFamily( HBaseMasterTable.TREE_INFO_FAMILY );
             s.addFamily( HBaseMasterTable.NORM_ATTRIBUTES_FAMILY );
-            s.setFilter( HBaseSubLevelIndexCursor.getFilter( filter, store ) );
+
+            Filter filter = new SingleColumnValueFilter( HBaseMasterTable.TREE_INFO_FAMILY,
+                HBaseMasterTable.STATUS_QUALIFIER, CompareOp.EQUAL, HBaseMasterTable.EXISTS );
+            Filter mainFilter = HBaseSubLevelIndexCursor.getFilter( exprNode, store );
+            if ( mainFilter != null )
+            {
+                FilterList filterList = new FilterList( Operator.MUST_PASS_ALL );
+                filterList.addFilter( mainFilter );
+                filterList.addFilter( filter );
+                filter = filterList;
+            }
+
+            s.setFilter( filter );
             scanner = store.getMasterTable().getScanner( s );
             iterator = scanner.iterator();
         }
@@ -82,15 +99,18 @@ public class HBaseOneLevelIndexCursor ex
             Result next = iterator.next();
             byte[] id = next.getValue( HBaseMasterTable.TREE_INFO_FAMILY, HBaseMasterTable.ID_QUALIFIER );
 
+//            //NavigableMap<byte[],byte[]> familyMap = next.getFamilyMap( HBaseMasterTable.NORM_ATTRIBUTES_FAMILY );
+//            NavigableMap<byte[],byte[]> familyMap = next.getFamilyMap( HBaseMasterTable.TREE_INFO_FAMILY );
+//            for ( byte[] key : familyMap.keySet() )
+//            {
+//                byte[] value = familyMap.get( key );
+//                System.out.println(Utils.getPrintableString( key ) );
+//                System.out.println(Utils.getPrintableString( value ) );
+//            }
+            
             currentEntry = new ForwardIndexEntry<UUID, ServerEntry, UUID>();
             currentEntry.setId( Utils.toUUID( id ) );
             currentEntry.setValue( baseId );
-            if ( filter != null )
-            {
-                //                //ServerEntry entry = store.getMasterTable().convertToServerEntry( Bytes.toLong( id ), next );
-                //                ServerEntry entry = store.lookup( Bytes.toLong( id ) );
-                //                currentEntry.setObject( entry );
-            }
 
             return true;
         }

Modified: directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBasePresenceIndexCursor.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBasePresenceIndexCursor.java?rev=925331&r1=925330&r2=925331&view=diff
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBasePresenceIndexCursor.java (original)
+++ directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBasePresenceIndexCursor.java Fri Mar 19 17:03:26 2010
@@ -28,6 +28,8 @@ import org.apache.directory.server.core.
 import org.apache.directory.server.xdbm.ForwardIndexEntry;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 
 
 /**
@@ -57,6 +59,8 @@ public class HBasePresenceIndexCursor ex
             byte[] start = presenceIndexTable.getPresenceKey( null );
             byte[] stop = Utils.incrementBytes( start );
             Scan s = new Scan( start, stop );
+            s.setFilter( new SingleColumnValueFilter( HBasePresenceIndexTable.INFO_FAMILY,
+                HBasePresenceIndexTable.STATUS_QUALIFIER, CompareOp.EQUAL, HBasePresenceIndexTable.EXISTS ) );
             s.addFamily( HBasePresenceIndexTable.INFO_FAMILY );
             scanner = presenceIndexTable.getScanner( s );
             iterator = scanner.iterator();

Modified: directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBaseSubLevelIndexCursor.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBaseSubLevelIndexCursor.java?rev=925331&r1=925330&r2=925331&view=diff
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBaseSubLevelIndexCursor.java (original)
+++ directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBaseSubLevelIndexCursor.java Fri Mar 19 17:03:26 2010
@@ -26,11 +26,13 @@ import java.util.UUID;
 import java.util.regex.Pattern;
 
 import org.apache.directory.server.core.entry.ServerEntry;
+import org.apache.directory.server.core.normalization.ExpandingVisitor;
 import org.apache.directory.server.core.partition.hbase.HBaseStore;
 import org.apache.directory.server.core.partition.hbase.SingleColumnQualifierFilter;
 import org.apache.directory.server.core.partition.hbase.Utils;
 import org.apache.directory.server.core.partition.hbase.table.HBaseMasterTable;
 import org.apache.directory.server.xdbm.ForwardIndexEntry;
+import org.apache.directory.shared.ldap.filter.AndNode;
 import org.apache.directory.shared.ldap.filter.EqualityNode;
 import org.apache.directory.shared.ldap.filter.ExprNode;
 import org.apache.directory.shared.ldap.filter.LeafNode;
@@ -42,6 +44,7 @@ import org.apache.hadoop.hbase.filter.Bi
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.FilterList;
 import org.apache.hadoop.hbase.filter.RegexStringComparator;
+import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
 import org.apache.hadoop.hbase.filter.WritableByteArrayComparable;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.filter.FilterList.Operator;
@@ -108,22 +111,34 @@ public class HBaseSubLevelIndexCursor ex
             scan.addFamily( HBaseMasterTable.TREE_INFO_FAMILY );
             scan.addFamily( HBaseMasterTable.NORM_ATTRIBUTES_FAMILY );
 
+            Filter existsFilter = new SingleColumnValueFilter( HBaseMasterTable.TREE_INFO_FAMILY,
+                HBaseMasterTable.STATUS_QUALIFIER, CompareOp.EQUAL, HBaseMasterTable.EXISTS );
             Filter mainFilter = getFilter( filter, store );
+            Filter filter = null;
             if ( mainFilter != null )
             {
                 // we need to get two kinds of entries:
                 // - all entries that match the filter
-                // - all entries that have an children: one-level count > 0
+                // - all entries that have children, i.e. oneLevelCount > 0
                 // hence we use an or filter here
-                FilterList filterList = new FilterList( Operator.MUST_PASS_ONE );
-                filterList.addFilter( mainFilter );
                 BinaryComparator comparator = new BinaryComparator( HBaseMasterTable.ONE_LEVEL_COUNT_QUALIFIER );
                 SingleColumnQualifierFilter countFilter = new SingleColumnQualifierFilter(
                     HBaseMasterTable.TREE_INFO_FAMILY, CompareOp.EQUAL, comparator );
-                //SingleColumnValueFilter countFilter = new SingleColumnValueFilter( HBaseMasterTable.TREE_INFO_FAMILY, HBaseMasterTable.ONE_LEVEL_COUNT_QUALIFIER, CompareOp.GREATER, Bytes.toBytes( 0L ) );
-                filterList.addFilter( countFilter );
-                scan.setFilter( filterList );
+
+                FilterList innerFilerList = new FilterList( Operator.MUST_PASS_ONE );
+                innerFilerList.addFilter( mainFilter );
+                innerFilerList.addFilter( countFilter );
+
+                FilterList outerFilterList = new FilterList( Operator.MUST_PASS_ALL );
+                outerFilterList.addFilter( innerFilerList );
+                outerFilterList.addFilter( existsFilter );
+                filter = outerFilterList;
+            }
+            else
+            {
+                filter = existsFilter;
             }
+            scan.setFilter( filter );
 
             scanner = store.getMasterTable().getScanner( scan );
             iterator = scanner.iterator();
@@ -192,12 +207,6 @@ public class HBaseSubLevelIndexCursor ex
         currentEntry = new ForwardIndexEntry<UUID, ServerEntry, UUID>();
         currentEntry.setId( id );
         currentEntry.setValue( baseId );
-        if ( filter != null && next != null )
-        {
-            //            //ServerEntry entry = store.getMasterTable().convertToServerEntry( id, next );
-            //            ServerEntry entry = store.lookup( id );
-            //            currentEntry.setObject( entry );
-        }
     }
 
 
@@ -207,7 +216,16 @@ public class HBaseSubLevelIndexCursor ex
         {
             try
             {
-                return buildFilter( node, store );
+                // Make sure the filter is expanded to check all descendants
+                // of an attribute type. For example if the LDAP filter is 
+                // (name=test) it must be expanded to (|(cn=test)(ou=test)...)
+                AndNode containerNode = new AndNode( node );
+                ExpandingVisitor visitor = new ExpandingVisitor( store.getSchemaManager() );
+                containerNode.accept( visitor );
+                ExprNode expanded = containerNode.getChildren().get( 0 );
+                //System.out.println(expanded);
+
+                return buildFilter( expanded, store );
             }
             catch ( IllegalStateException e )
             {
@@ -280,7 +298,7 @@ public class HBaseSubLevelIndexCursor ex
                 case OR:
                     return buildOrFilter( ( OrNode ) node, store );
                 case AND:
-                    //return buildAndCursor( ( AndNode ) node );
+                    //return buildAndCursor( ( AndNode ) node, store );
                 case NOT:
                     //return new SkipFilter( buildFilter( ( ( NotNode ) node ).getFirstChild() ) );
                 default:
@@ -290,12 +308,12 @@ public class HBaseSubLevelIndexCursor ex
     }
 
 
-    //    private Filter buildAndCursor( AndNode node ) throws Exception
+    //    private static Filter buildAndCursor( AndNode node, HBaseStore store ) throws Exception
     //    {
     //        FilterList orList = new FilterList( Operator.MUST_PASS_ALL );
     //        for ( ExprNode child : node.getChildren() )
     //        {
-    //            orList.addFilter( buildFilter(child) );
+    //            orList.addFilter( buildFilter( child, store ) );
     //        }
     //        return orList;
     //    }
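
The expansion step added to getFilter() above can be shown in isolation: ExpandingVisitor rewrites an attribute type into all of its schema descendants, so a filter such as (name=test) effectively becomes (|(cn=test)(ou=test)...) before it is translated into HBase filters. A sketch, assuming a SchemaManager named schemaManager and a parsed ExprNode named node:

    // wrap the node in an AndNode so the visitor has a branch whose child it can replace
    AndNode containerNode = new AndNode( node );
    ExpandingVisitor visitor = new ExpandingVisitor( schemaManager );
    containerNode.accept( visitor );
    // the (possibly expanded) expression is the container's only child
    ExprNode expanded = containerNode.getChildren().get( 0 );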

Modified: directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBaseUserColumnIndexCursor.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBaseUserColumnIndexCursor.java?rev=925331&r1=925330&r2=925331&view=diff
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBaseUserColumnIndexCursor.java (original)
+++ directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBaseUserColumnIndexCursor.java Fri Mar 19 17:03:26 2010
@@ -36,10 +36,14 @@ import org.apache.directory.shared.ldap.
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.BinaryComparator;
 import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.FilterList;
 import org.apache.hadoop.hbase.filter.RegexStringComparator;
 import org.apache.hadoop.hbase.filter.RowFilter;
+import org.apache.hadoop.hbase.filter.ValueFilter;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
+import org.apache.hadoop.hbase.filter.FilterList.Operator;
 
 
 /**
@@ -221,11 +225,19 @@ public class HBaseUserColumnIndexCursor 
             }
 
             Filter filter = null;
+            ValueFilter existsFilter = new ValueFilter( CompareOp.EQUAL, new BinaryComparator( HBaseIndexTable.EXISTS ) );
             if ( rowFilterPattern != null )
             {
                 RegexStringComparator comparator = new RegexStringComparator( rowFilterPattern.toString() );
                 RowFilter rowFilter = new RowFilter( CompareOp.EQUAL, comparator );
-                filter = rowFilter;
+                FilterList filterList = new FilterList( Operator.MUST_PASS_ALL );
+                filterList.addFilter( rowFilter );
+                filterList.addFilter( existsFilter );
+                filter = filterList;
+            }
+            else
+            {
+                filter = existsFilter;
             }
 
             Scan scan = new Scan( start, stop );

Modified: directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBaseUserRowIndexCursor.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBaseUserRowIndexCursor.java?rev=925331&r1=925330&r2=925331&view=diff
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBaseUserRowIndexCursor.java (original)
+++ directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/cursor/HBaseUserRowIndexCursor.java Fri Mar 19 17:03:26 2010
@@ -33,9 +33,12 @@ import org.apache.directory.shared.ldap.
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.FilterList;
 import org.apache.hadoop.hbase.filter.RegexStringComparator;
 import org.apache.hadoop.hbase.filter.RowFilter;
+import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
+import org.apache.hadoop.hbase.filter.FilterList.Operator;
 
 
 /**
@@ -159,11 +162,20 @@ public class HBaseUserRowIndexCursor ext
             }
 
             Filter filter = null;
+            SingleColumnValueFilter existsFilter = new SingleColumnValueFilter( HBaseIndexTable.INFO_FAMILY,
+                HBaseIndexTable.STATUS_QUALIFIER, CompareOp.EQUAL, HBaseIndexTable.EXISTS );
             if ( rowFilterPattern != null )
             {
                 RegexStringComparator comparator = new RegexStringComparator( rowFilterPattern.toString() );
                 RowFilter rowFilter = new RowFilter( CompareOp.EQUAL, comparator );
-                filter = rowFilter;
+                FilterList filterList = new FilterList( Operator.MUST_PASS_ALL );
+                filterList.addFilter( rowFilter );
+                filterList.addFilter( existsFilter );
+                filter = filterList;
+            }
+            else
+            {
+                filter = existsFilter;
             }
 
             Scan scan = new Scan( start, stop );

Modified: directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/index/HBaseUserRowIndex.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/index/HBaseUserRowIndex.java?rev=925331&r1=925330&r2=925331&view=diff
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/index/HBaseUserRowIndex.java (original)
+++ directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/index/HBaseUserRowIndex.java Fri Mar 19 17:03:26 2010
@@ -43,6 +43,12 @@ public class HBaseUserRowIndex extends H
     }
 
 
+    public HBaseUserRowIndex( String attribute )
+    {
+        setAttributeId( attribute );
+    }
+
+
     @Override
     public void close() throws Exception
     {

Modified: directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseColumnIndexTable.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseColumnIndexTable.java?rev=925331&r1=925330&r2=925331&view=diff
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseColumnIndexTable.java (original)
+++ directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseColumnIndexTable.java Fri Mar 19 17:03:26 2010
@@ -32,10 +32,12 @@ import org.apache.directory.shared.ldap.
 import org.apache.directory.shared.ldap.util.Base64;
 import org.apache.directory.shared.ldap.util.ByteBuffer;
 import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.filter.BinaryComparator;
+import org.apache.hadoop.hbase.filter.ValueFilter;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -187,6 +189,7 @@ public class HBaseColumnIndexTable exten
 
         Get get = new Get( row );
         get.addFamily( INFO_FAMILY );
+        get.setFilter( new ValueFilter( CompareOp.EQUAL, new BinaryComparator( EXISTS ) ) );
         Result result = HBaseTableHelper.get( getIndexTablePool(), indexTableName, get );
         if ( result.getRow() == null )
         {
@@ -225,7 +228,7 @@ public class HBaseColumnIndexTable exten
         // exact match (attribute=value): =value -> id
         byte[] equalsKey = getEqualsKey( value );
         Put exactPut = new Put( equalsKey );
-        exactPut.add( INFO_FAMILY, Utils.toBytes( id ), DUMMY );
+        exactPut.add( INFO_FAMILY, Utils.toBytes( id ), EXISTS );
         HBaseTableHelper.put( getIndexTablePool(), indexTableName, exactPut );
 
         // TODO: optimize - don't need to clear the whole cache
@@ -239,9 +242,9 @@ public class HBaseColumnIndexTable exten
     {
         // exact match (attribute=value): =value -> id
         byte[] equalsKey = getEqualsKey( value );
-        Delete exactDel = new Delete( equalsKey );
-        exactDel.deleteColumn( INFO_FAMILY, Utils.toBytes( id ) );
-        HBaseTableHelper.delete( getIndexTablePool(), indexTableName, exactDel );
+        Put exactPut = new Put( equalsKey );
+        exactPut.add( INFO_FAMILY, Utils.toBytes( id ), DELETED );
+        HBaseTableHelper.put( getIndexTablePool(), indexTableName, exactPut );
 
         // TODO: optimize - don't need to clear the whole cache
         infoCache.clear();

Modified: directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseIndexTable.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseIndexTable.java?rev=925331&r1=925330&r2=925331&view=diff
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseIndexTable.java (original)
+++ directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseIndexTable.java Fri Mar 19 17:03:26 2010
@@ -37,9 +37,11 @@ public interface HBaseIndexTable
 {
 
     public static final byte[] INFO_FAMILY = Bytes.toBytes( "info" );
-    public static final byte[] ID_QUALIFIER = Bytes.toBytes( "id" );
-    public static final byte[] DUMMY =
-        { 0x00 };
+    public static final byte[] STATUS_QUALIFIER = Bytes.toBytes( "status" );
+    public static final byte[] EXISTS =
+        { 'e' };
+    public static final byte[] DELETED =
+        { 'd' };
     public static final Byte DELIMITER = Byte.valueOf( ( byte ) 0x00 );
 
 

Modified: directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseMasterTable.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseMasterTable.java?rev=925331&r1=925330&r2=925331&view=diff
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseMasterTable.java (original)
+++ directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseMasterTable.java Fri Mar 19 17:03:26 2010
@@ -45,7 +45,7 @@ import org.apache.directory.shared.ldap.
 import org.apache.directory.shared.ldap.schema.AttributeType;
 import org.apache.directory.shared.ldap.schema.SchemaManager;
 import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
@@ -72,6 +72,14 @@ public class HBaseMasterTable
     public static final byte[] PARENT_ID_QUALIFIER = Bytes.toBytes( "parentId" );
     public static final byte[] UP_RDN_QUALIFIER = Bytes.toBytes( "upRdn" );
     public static final byte[] NORM_RDN_QUALIFIER = Bytes.toBytes( "normRdn" );
+    public static final byte[] STATUS_QUALIFIER = Bytes.toBytes( "status" );
+    public static final byte[] EXISTS =
+        { 'e' };
+    public static final byte[] DELETED =
+        { 'd' };
+    public static final byte[] NULL =
+        { 0x00 };
+    public static final byte[] ZERO = Bytes.toBytes( 0l );
 
     public static final byte[] UP_ATTRIBUTES_FAMILY = Bytes.toBytes( "upAttributes" );
 
@@ -191,6 +199,7 @@ public class HBaseMasterTable
     {
         // put to master table
         Put masterPut = new Put( Utils.toBytes( id ) );
+        masterPut.add( TREE_INFO_FAMILY, STATUS_QUALIFIER, EXISTS );
         masterPut.add( TREE_INFO_FAMILY, PARENT_ID_QUALIFIER, Utils.toBytes( mti.parentId ) );
         masterPut.add( TREE_INFO_FAMILY, UP_RDN_QUALIFIER, Bytes.toBytes( mti.upName ) );
         masterPut.add( TREE_INFO_FAMILY, NORM_RDN_QUALIFIER, Bytes.toBytes( mti.normName ) );
@@ -216,6 +225,7 @@ public class HBaseMasterTable
     {
         // put to tree table
         Put treePut = new Put( mti.treeTableKey );
+        treePut.add( TREE_INFO_FAMILY, STATUS_QUALIFIER, EXISTS );
         treePut.add( TREE_INFO_FAMILY, ID_QUALIFIER, Utils.toBytes( id ) );
         for ( EntryAttribute attribute : entry )
         {
@@ -247,21 +257,7 @@ public class HBaseMasterTable
 
         if ( maintainCounters )
         {
-            // update parent one-level count
-            MasterTreeInfo parentKey = fetchMasterTreeInfo( mti.parentId );
-            if ( parentKey != null )
-            {
-                HBaseTableHelper.increment( getTreeTablePool(), treeTableName, parentKey.treeTableKey,
-                    TREE_INFO_FAMILY, ONE_LEVEL_COUNT_QUALIFIER );
-            }
-
-            // update all parents sub-level count
-            while ( parentKey != null )
-            {
-                HBaseTableHelper.increment( getTreeTablePool(), treeTableName, parentKey.treeTableKey,
-                    TREE_INFO_FAMILY, SUB_LEVEL_COUNT_QUALIFIER );
-                parentKey = fetchMasterTreeInfo( parentKey.parentId );
-            }
+            updateOneAndSubLevelCounters( mti.parentId, true );
 
             // clear caches
             oneLevelCountCache.clear();
@@ -276,67 +272,75 @@ public class HBaseMasterTable
     {
         MasterTreeInfo key = fetchMasterTreeInfo( id );
 
-        // delete in master table
-        Delete masterDel = new Delete( Utils.toBytes( id ) );
-        HBaseTableHelper.delete( getMasterTablePool(), masterTableName, masterDel );
-
-        // delete in tree table
-        Delete treeDel = new Delete( key.treeTableKey );
-        HBaseTableHelper.delete( getTreeTablePool(), treeTableName, treeDel );
+        nullout( getMasterTablePool(), masterTableName, Utils.toBytes( id ), TREE_INFO_FAMILY, UP_ATTRIBUTES_FAMILY );
+        nullout( getTreeTablePool(), treeTableName, key.treeTableKey, TREE_INFO_FAMILY, NORM_ATTRIBUTES_FAMILY );
 
         if ( maintainCounters )
         {
-            // update parent one-level count
-            UUID parentId = key.parentId;
-            if ( !parentId.equals( ROOT_ID ) )
+            updateOneAndSubLevelCounters( key.parentId, false );
+        }
+
+        // clear caches
+        mtiCache.clear();
+        idCache.clear();
+        dnCache.clear();
+        entryCache.clear();
+        oneLevelCountCache.clear();
+        subLevelCountCache.clear();
+    }
+
+
+    private void updateOneAndSubLevelCounters( UUID parentId, boolean increment ) throws Exception
+    {
+        // update parent one-level count
+        if ( !parentId.equals( ROOT_ID ) )
+        {
+            MasterTreeInfo parentKey = fetchMasterTreeInfo( parentId );
+            if ( increment )
+            {
+                HBaseTableHelper.increment( getTreeTablePool(), treeTableName, parentKey.treeTableKey,
+                    TREE_INFO_FAMILY, ONE_LEVEL_COUNT_QUALIFIER );
+            }
+            else
             {
-                MasterTreeInfo parentKey = fetchMasterTreeInfo( parentId );
                 HBaseTableHelper.decrement( getTreeTablePool(), treeTableName, parentKey.treeTableKey,
                     TREE_INFO_FAMILY, ONE_LEVEL_COUNT_QUALIFIER );
             }
+        }
 
-            // update sub-level count of all parents
-            while ( !parentId.equals( ROOT_ID ) )
+        // update sub-level count of all parents
+        while ( !parentId.equals( ROOT_ID ) )
+        {
+            MasterTreeInfo parentKey = fetchMasterTreeInfo( parentId );
+            if ( increment )
+            {
+                HBaseTableHelper.increment( getTreeTablePool(), treeTableName, parentKey.treeTableKey,
+                    TREE_INFO_FAMILY, SUB_LEVEL_COUNT_QUALIFIER );
+            }
+            else
             {
-                MasterTreeInfo parentKey = fetchMasterTreeInfo( parentId );
                 HBaseTableHelper.decrement( getTreeTablePool(), treeTableName, parentKey.treeTableKey,
                     TREE_INFO_FAMILY, SUB_LEVEL_COUNT_QUALIFIER );
-                parentId = parentKey.parentId;
             }
+            parentId = parentKey.parentId;
         }
-
-        // clear caches
-        mtiCache.clear();
-        idCache.clear();
-        dnCache.clear();
-        entryCache.clear();
-        oneLevelCountCache.clear();
-        subLevelCountCache.clear();
     }
 
 
     public void modify( UUID id, ServerEntry entry ) throws Exception
     {
         // TODO: replace quick-and-dirty implementation (delete+put) with a better algorithm
-        MasterTreeInfo ttk = fetchMasterTreeInfo( id );
+        MasterTreeInfo mti = fetchMasterTreeInfo( id );
 
-        // delete complete attribute family
-        Delete masterDel = new Delete( Utils.toBytes( id ) );
-        masterDel.deleteFamily( UP_ATTRIBUTES_FAMILY );
-        HBaseTableHelper.delete( getMasterTablePool(), masterTableName, masterDel );
-        Delete treeDel = new Delete( Utils.toBytes( id ) );
-        treeDel.deleteFamily( NORM_ATTRIBUTES_FAMILY );
-        HBaseTableHelper.delete( getTreeTablePool(), treeTableName, treeDel );
-
-        // dirty workaround:
-        // we need to avoid that the new attributes are written in the same 
-        // millisecond as the old attributes were deleted.
-        Thread.sleep( 20 );
+        nullout( getMasterTablePool(), masterTableName, Utils.toBytes( id ), UP_ATTRIBUTES_FAMILY, UP_ATTRIBUTES_FAMILY );
+        nullout( getTreeTablePool(), treeTableName, mti.treeTableKey, NORM_ATTRIBUTES_FAMILY, NORM_ATTRIBUTES_FAMILY );
 
         // add all attributes
         // TODO: duplicate code
         Put masterPut = new Put( Utils.toBytes( id ) );
-        Put treePut = new Put( ttk.treeTableKey );
+        masterPut.add( TREE_INFO_FAMILY, STATUS_QUALIFIER, EXISTS );
+        Put treePut = new Put( mti.treeTableKey );
+        treePut.add( TREE_INFO_FAMILY, STATUS_QUALIFIER, EXISTS );
         for ( EntryAttribute attribute : entry )
         {
             String attr = attribute.getUpId();
@@ -376,6 +380,98 @@ public class HBaseMasterTable
     }
 
 
+    public void move( UUID id, UUID newParentId, RDN newRdn ) throws Exception
+    {
+        MasterTreeInfo oldMti = fetchMasterTreeInfo( id );
+        MasterTreeInfo newMti = new MasterTreeInfo( oldMti.parentId, oldMti.normName, oldMti.upName );
+        if ( newParentId != null )
+        {
+            newMti = new MasterTreeInfo( newParentId, newMti.normName, newMti.upName );
+        }
+        if ( newRdn != null )
+        {
+            newMti = new MasterTreeInfo( newMti.parentId, newRdn.getNormName(), newRdn.getUpName() );
+        }
+
+        // master:treeInfo: set new parentId, upRdn and normRdn
+        Put masterPut = new Put( Utils.toBytes( id ) );
+        masterPut.add( TREE_INFO_FAMILY, PARENT_ID_QUALIFIER, Utils.toBytes( newMti.parentId ) );
+        masterPut.add( TREE_INFO_FAMILY, UP_RDN_QUALIFIER, Bytes.toBytes( newMti.upName ) );
+        masterPut.add( TREE_INFO_FAMILY, NORM_RDN_QUALIFIER, Bytes.toBytes( newMti.normName ) );
+        HBaseTableHelper.put( getMasterTablePool(), masterTableName, masterPut );
+
+        // tree: delete old row, add new row
+        renameInTree( oldMti, newMti );
+
+        // update parent counters if the entry has been moved to a new parent
+        if ( newParentId != null && maintainCounters )
+        {
+            updateOneAndSubLevelCounters( oldMti.parentId, false );
+            updateOneAndSubLevelCounters( newMti.parentId, true );
+        }
+    }
+
+
+    private void renameInTree( MasterTreeInfo oldMti, MasterTreeInfo newMti ) throws Exception
+    {
+        // tree: new row key! copy oneLevelCount and subLevelCount!
+        Get get = new Get( oldMti.treeTableKey );
+        get.addFamily( TREE_INFO_FAMILY );
+        get.addFamily( NORM_ATTRIBUTES_FAMILY );
+        Result result = HBaseTableHelper.get( getTreeTablePool(), treeTableName, get );
+
+        nullout( getTreeTablePool(), treeTableName, oldMti.treeTableKey, TREE_INFO_FAMILY, NORM_ATTRIBUTES_FAMILY );
+
+        Put treePut = new Put( newMti.treeTableKey );
+        //treePut.add( TREE_INFO_FAMILY, STATUS_QUALIFIER, EXISTS );
+        List<KeyValue> list = result.list();
+        for ( KeyValue kv : list )
+        {
+            treePut.add( kv.getFamily(), kv.getQualifier(), kv.getValue() );
+        }
+        HBaseTableHelper.put( getTreeTablePool(), treeTableName, treePut );
+
+        mtiCache.clear();
+        idCache.clear();
+        dnCache.clear();
+        entryCache.clear();
+        oneLevelCountCache.clear();
+        subLevelCountCache.clear();
+    }
+
+
+    private void nullout( HBaseTablePool pool, String tableName, byte[] row, byte[] f1, byte[] f2 ) throws Exception
+    {
+        Get get = new Get( row );
+        get.addFamily( f1 );
+        get.addFamily( f2 );
+        Result result = HBaseTableHelper.get( pool, tableName, get );
+
+        Put put = new Put( row );
+        put.add( TREE_INFO_FAMILY, STATUS_QUALIFIER, DELETED );
+        List<KeyValue> list = result.list();
+        for ( KeyValue kv : list )
+        {
+            byte[] family = kv.getFamily();
+            byte[] qualifier = kv.getQualifier();
+            if ( !put.has( family, qualifier ) )
+            {
+                if ( Bytes.equals( family, TREE_INFO_FAMILY ) //
+                    && ( Bytes.equals( qualifier, ONE_LEVEL_COUNT_QUALIFIER ) // 
+                    || Bytes.equals( qualifier, SUB_LEVEL_COUNT_QUALIFIER ) ) )
+                {
+                    put.add( family, qualifier, ZERO );
+                }
+                else
+                {
+                    put.add( family, qualifier, NULL );
+                }
+            }
+        }
+        HBaseTableHelper.put( pool, tableName, put );
+    }
+
+
     /**
      * Fetch the entry by its ID.
      * 
@@ -396,13 +492,11 @@ public class HBaseMasterTable
         }
 
         Get entryGet = new Get( Utils.toBytes( id ) );
-        entryGet.addColumn( TREE_INFO_FAMILY, ID_QUALIFIER );
-        entryGet.addColumn( TREE_INFO_FAMILY, PARENT_ID_QUALIFIER );
-        entryGet.addColumn( TREE_INFO_FAMILY, UP_RDN_QUALIFIER );
-        entryGet.addColumn( TREE_INFO_FAMILY, NORM_RDN_QUALIFIER );
-        entryGet.addColumn( UP_ATTRIBUTES_FAMILY );
+        //entryGet.setFilter( new SingleColumnValueFilter( TREE_INFO_FAMILY, STATUS_QUALIFIER, CompareOp.EQUAL, EXISTS ) );
+        entryGet.addFamily( TREE_INFO_FAMILY );
+        entryGet.addFamily( UP_ATTRIBUTES_FAMILY );
         Result result = HBaseTableHelper.get( getMasterTablePool(), masterTableName, entryGet );
-        if ( result.getRow() == null )
+        if ( result.getRow() == null || Bytes.equals( result.getValue( TREE_INFO_FAMILY, STATUS_QUALIFIER ), DELETED ) )
         {
             return null;
         }
@@ -424,6 +518,11 @@ public class HBaseMasterTable
         ServerEntry entry = new DefaultServerEntry( schemaManager, dn );
         for ( Map.Entry<byte[], byte[]> attributeEntry : attributesMap.entrySet() )
         {
+            if ( Bytes.equals( attributeEntry.getValue(), NULL ) )
+            {
+                continue;
+            }
+
             byte[] qualifier = attributeEntry.getKey();
             String attributeDescription = Bytes.toString( attributeEntry.getKey(), 0, qualifier.length - 4 );
 
@@ -523,9 +622,11 @@ public class HBaseMasterTable
     {
         MasterTreeInfo mti = fetchMasterTreeInfo( id );
         Get get = new Get( mti.treeTableKey );
+        //get.setFilter( new SingleColumnValueFilter( TREE_INFO_FAMILY, STATUS_QUALIFIER, CompareOp.EQUAL, EXISTS ) );
         get.addFamily( NORM_ATTRIBUTES_FAMILY );
         Result result = HBaseTableHelper.get( getTreeTablePool(), treeTableName, get );
         NavigableMap<byte[], byte[]> attributeMap = result.getFamilyMap( NORM_ATTRIBUTES_FAMILY );
+        // TODO: filter NULLs
         return attributeMap;
     }
 
@@ -550,6 +651,7 @@ public class HBaseMasterTable
         {
             return 0;
         }
+        // TODO: add filter?
         count = HBaseTableHelper.getLongValue( getTreeTablePool(), treeTableName, mti.treeTableKey, TREE_INFO_FAMILY,
             ONE_LEVEL_COUNT_QUALIFIER, 0L );
 
@@ -578,6 +680,7 @@ public class HBaseMasterTable
         {
             return 0;
         }
+        // TODO: add filter?
         count = HBaseTableHelper.getLongValue( getTreeTablePool(), treeTableName, mti.treeTableKey, TREE_INFO_FAMILY,
             SUB_LEVEL_COUNT_QUALIFIER, 0L );
 
@@ -673,6 +776,7 @@ public class HBaseMasterTable
             return id;
         }
 
+        // TODO: add filter
         id = HBaseTableHelper.getUUIDValue( getTreeTablePool(), treeTableName, mti.treeTableKey, TREE_INFO_FAMILY,
             ID_QUALIFIER, null );
         if ( id == null )
@@ -706,11 +810,10 @@ public class HBaseMasterTable
         }
 
         Get get = new Get( Utils.toBytes( id ) );
-        get.addColumn( TREE_INFO_FAMILY, PARENT_ID_QUALIFIER );
-        get.addColumn( TREE_INFO_FAMILY, NORM_RDN_QUALIFIER );
-        get.addColumn( TREE_INFO_FAMILY, UP_RDN_QUALIFIER );
+        //get.setFilter( new SingleColumnValueFilter( TREE_INFO_FAMILY, STATUS_QUALIFIER, CompareOp.EQUAL, EXISTS ) );
+        get.addFamily( TREE_INFO_FAMILY );
         Result result = HBaseTableHelper.get( getMasterTablePool(), masterTableName, get );
-        if ( result.getRow() == null )
+        if ( result.getRow() == null || Bytes.equals( result.getValue( TREE_INFO_FAMILY, STATUS_QUALIFIER ), DELETED ) )
         {
             return null;
         }

Modified: directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBasePresenceIndexTable.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBasePresenceIndexTable.java?rev=925331&r1=925330&r2=925331&view=diff
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBasePresenceIndexTable.java (original)
+++ directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBasePresenceIndexTable.java Fri Mar 19 17:03:26 2010
@@ -27,11 +27,12 @@ import org.apache.directory.server.core.
 import org.apache.directory.server.core.partition.hbase.Utils;
 import org.apache.directory.server.core.partition.hbase.cursor.HBasePresenceIndexCursor;
 import org.apache.directory.shared.ldap.util.ByteBuffer;
-import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.util.Bytes;
 
 
@@ -47,9 +48,11 @@ public class HBasePresenceIndexTable
     //    private static final Logger LOG = LoggerFactory.getLogger( HBasePresenceIndexTable.class );
 
     public static final byte[] INFO_FAMILY = Bytes.toBytes( "info" );
-    public static final byte[] ID_QUALIFIER = Bytes.toBytes( "id" );
-    public static final byte[] DUMMY =
-        { 0x00 };
+    public static final byte[] STATUS_QUALIFIER = Bytes.toBytes( "status" );
+    public static final byte[] EXISTS =
+        { 'e' };
+    public static final byte[] DELETED =
+        { 'd' };
     public static final UUID VALUE_SCAN_FIRST_ENTRYID = UUID.fromString( "00000000-0000-0000-0000-000000000000" );
     public static final UUID VALUE_SCAN_LAST_ENTRYID = UUID.fromString( "ffffffff-ffff-ffff-ffff-ffffffffffff" );
 
@@ -154,6 +157,7 @@ public class HBasePresenceIndexTable
     {
         byte[] row = getPresenceKey( entryId );
         Get get = new Get( row );
+        get.setFilter( new SingleColumnValueFilter( INFO_FAMILY, STATUS_QUALIFIER, CompareOp.EQUAL, EXISTS ) );
         return HBaseTableHelper.exists( getIndexTablePool(), indexTableName, get );
     }
 
@@ -169,7 +173,7 @@ public class HBasePresenceIndexTable
         // presence (attribute=*): *<id> 
         byte[] presenceRow = getPresenceKey( entryId );
         Put presencePut = new Put( presenceRow );
-        presencePut.add( INFO_FAMILY, ID_QUALIFIER, DUMMY );
+        presencePut.add( INFO_FAMILY, STATUS_QUALIFIER, EXISTS );
         HBaseTableHelper.put( getIndexTablePool(), indexTableName, presencePut );
 
         countCache.clear();
@@ -180,8 +184,9 @@ public class HBasePresenceIndexTable
     {
         // presence (attribute=*): *<id> -> id
         byte[] presenceRow = getPresenceKey( entryId );
-        Delete presenceDel = new Delete( presenceRow );
-        HBaseTableHelper.delete( getIndexTablePool(), indexTableName, presenceDel );
+        Put presencePut = new Put( presenceRow );
+        presencePut.add( INFO_FAMILY, STATUS_QUALIFIER, DELETED );
+        HBaseTableHelper.put( getIndexTablePool(), indexTableName, presencePut );
 
         countCache.clear();
     }

Modified: directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseRowIndexTable.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseRowIndexTable.java?rev=925331&r1=925330&r2=925331&view=diff
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseRowIndexTable.java (original)
+++ directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseRowIndexTable.java Fri Mar 19 17:03:26 2010
@@ -29,9 +29,10 @@ import org.apache.directory.server.core.
 import org.apache.directory.server.xdbm.IndexCursor;
 import org.apache.directory.shared.ldap.schema.AttributeType;
 import org.apache.directory.shared.ldap.util.ByteBuffer;
-import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -178,6 +179,7 @@ public class HBaseRowIndexTable extends 
             return false;
         }
         Get get = new Get( row );
+        get.setFilter( new SingleColumnValueFilter( INFO_FAMILY, STATUS_QUALIFIER, CompareOp.EQUAL, EXISTS ) );
         boolean exists = HBaseTableHelper.exists( getIndexTablePool(), indexTableName, get );
         existsCache.put( key, exists );
         return exists;
@@ -189,7 +191,7 @@ public class HBaseRowIndexTable extends 
         // exact match (attribute=value): =value<0x00><id> -> id, value
         byte[] exactRow = getEqualsKey( value, id );
         Put exactPut = new Put( exactRow );
-        exactPut.add( INFO_FAMILY, ID_QUALIFIER, DUMMY );
+        exactPut.add( INFO_FAMILY, STATUS_QUALIFIER, EXISTS );
         HBaseTableHelper.put( getIndexTablePool(), indexTableName, exactPut );
 
         // TODO: optimize - don't need to clear the whole cache
@@ -200,10 +202,10 @@ public class HBaseRowIndexTable extends 
 
     public void drop( byte[] value, UUID id ) throws Exception
     {
-        // exact match (attribute=value): =value<0x00><id> -> id
         byte[] exactRow = getEqualsKey( value, id );
-        Delete exactDel = new Delete( exactRow );
-        HBaseTableHelper.delete( getIndexTablePool(), indexTableName, exactDel );
+        Put exactPut = new Put( exactRow );
+        exactPut.add( INFO_FAMILY, STATUS_QUALIFIER, DELETED );
+        HBaseTableHelper.put( getIndexTablePool(), indexTableName, exactPut );
 
         // TODO: optimize - don't need to clear the whole cache
         countCache.clear();

Modified: directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseTableHelper.java
URL: http://svn.apache.org/viewvc/directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseTableHelper.java?rev=925331&r1=925330&r2=925331&view=diff
==============================================================================
--- directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseTableHelper.java (original)
+++ directory/sandbox/seelmann/hbase-partition/src/main/java/org/apache/directory/server/core/partition/hbase/table/HBaseTableHelper.java Fri Mar 19 17:03:26 2010
@@ -226,7 +226,7 @@ public class HBaseTableHelper
             RPC_COUNT++;
             byte[] value = result.getValue( family, qualifier );
             UUID uuid;
-            if ( value == null )
+            if ( value == null || value.length != 36 )
             {
                 uuid = defaultValue;
             }