Posted to commits@jackrabbit.apache.org by tr...@apache.org on 2007/02/01 15:26:34 UTC

svn commit: r502223 - in /jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core: ./ nodetype/

Author: tripod
Date: Thu Feb  1 06:26:33 2007
New Revision: 502223

URL: http://svn.apache.org/viewvc?view=rev&rev=502223
Log:
JCR-726: Improve NodeTypeRegistry.effectiveNodeType()

Added:
    jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/BitsetENTCacheImpl.java   (with props)
    jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/EffectiveNodeTypeCacheImpl.java   (with props)
Modified:
    jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/NodeImpl.java
    jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/EffectiveNodeType.java
    jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/EffectiveNodeTypeCache.java
    jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/NodeTypeRegistry.java
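
The core of this change is the new EffectiveNodeTypeCache interface, with a bitset-based implementation (BitsetENTCacheImpl) and a port of the old weighted-key cache (EffectiveNodeTypeCacheImpl). For orientation, a minimal usage sketch, not part of the commit: it assumes code living in the org.apache.jackrabbit.core.nodetype package (the cache constructors are package-private) and treats 'ntNames' as a placeholder array of registered node type names. The real lookup logic is in NodeTypeRegistry.getEffectiveNodeType() below.

    // sketch only: exact-hit lookup followed by a findBest() probe,
    // mirroring the pattern NodeTypeRegistry uses in this commit
    EffectiveNodeTypeCache entCache = new BitsetENTCacheImpl();
    EffectiveNodeTypeCache.Key key = entCache.getKey(ntNames);   // ntNames is a QName[]
    EffectiveNodeType ent;
    if (entCache.contains(key)) {
        ent = entCache.get(key);                    // exact aggregate already cached
    } else {
        EffectiveNodeTypeCache.Key subKey = entCache.findBest(key);
        // subKey, if non-null, is the largest cached aggregate contained in key;
        // the registry merges it into the result and continues with key.subtract(subKey)
        ent = (subKey != null) ? entCache.get(subKey) : null;
    }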

Modified: jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/NodeImpl.java
URL: http://svn.apache.org/viewvc/jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/NodeImpl.java?view=diff&rev=502223&r1=502222&r2=502223
==============================================================================
--- jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/NodeImpl.java (original)
+++ jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/NodeImpl.java Thu Feb  1 06:26:33 2007
@@ -860,12 +860,24 @@
      * @throws RepositoryException if an error occurs
      */
     public EffectiveNodeType getEffectiveNodeType() throws RepositoryException {
+        return getEffectiveNodeType(((NodeState) state).getMixinTypeNames());
+    }
+
+    /**
+     * Small optimization to avoid a double call for the mixin types.
+     *
+     * @param mixins the set of mixins
+     * @return the effective node type
+     * @throws RepositoryException if an error occurs
+     */
+    private EffectiveNodeType getEffectiveNodeType(Set mixins)
+            throws RepositoryException {
+
         // build effective node type of mixins & primary type
         NodeTypeRegistry ntReg = session.getNodeTypeManager().getNodeTypeRegistry();
-        // mixin types
-        Set set = ((NodeState) state).getMixinTypeNames();
-        QName[] types = new QName[set.size() + 1];
-        set.toArray(types);
+
+        QName[] types = new QName[mixins.size() + 1];
+        mixins.toArray(types);
         // primary type
         types[types.length - 1] = primaryTypeName;
         try {
@@ -1242,12 +1254,13 @@
         if (ntName.equals(primaryTypeName)) {
             return true;
         }
-        if (((NodeState) state).getMixinTypeNames().contains(ntName)) {
+        Set mixins = ((NodeState) state).getMixinTypeNames();
+        if (mixins.contains(ntName)) {
             return true;
         }
 
         // check effective node type
-        return getEffectiveNodeType().includesNodeType(ntName);
+        return getEffectiveNodeType(mixins).includesNodeType(ntName);
     }
 
     /**

Added: jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/BitsetENTCacheImpl.java
URL: http://svn.apache.org/viewvc/jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/BitsetENTCacheImpl.java?view=auto&rev=502223
==============================================================================
--- jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/BitsetENTCacheImpl.java (added)
+++ jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/BitsetENTCacheImpl.java Thu Feb  1 06:26:33 2007
@@ -0,0 +1,490 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.core.nodetype;
+
+import org.apache.jackrabbit.name.QName;
+
+import java.util.TreeSet;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.ArrayList;
+import java.io.PrintStream;
+
+import EDU.oswego.cs.dl.util.concurrent.ConcurrentReaderHashMap;
+
+/**
+ * Implements an effective node type cache that uses a bit set for storing
+ * which nodetypes participate in an aggregate.
+ */
+public class BitsetENTCacheImpl implements EffectiveNodeTypeCache {
+
+    /**
+     * constant for bits-per-word
+     */
+    private static final int BPW = 64;
+
+    /**
+     * OR mask for bit set
+     */
+    private static final long[] OR_MASK = new long[BPW];
+    static {
+        for (int i=0; i<BPW; i++) {
+            OR_MASK[i] = 1L << i;
+        }
+    }
+
+    /**
+     * An ordered set of the keys. This is used for {@link #findBest(Key)}.
+     */
+    private final TreeSet sortedKeys;
+
+    /**
+     * cache of pre-built aggregations of node types
+     */
+    private final HashMap aggregates;
+
+    /**
+     * A lookup table for bit numbers for a given name.
+     *
+     * Note: further performance improvements could be made if this index were
+     * stored in the nodetype registry, since only registered nodetype names
+     * are allowed in the keys.
+     */
+    private final ConcurrentReaderHashMap nameIndex = new ConcurrentReaderHashMap();
+
+    /**
+     * The reverse lookup table for bit numbers to names
+     */
+    private QName[] names = new QName[1024];
+
+    /**
+     * Creates a new bitset effective node type cache
+     */
+    BitsetENTCacheImpl() {
+        sortedKeys = new TreeSet();
+        aggregates = new HashMap();
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public Key getKey(QName[] ntNames) {
+        return new BitsetKey(ntNames, nameIndex.size() + ntNames.length);
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public void put(EffectiveNodeType ent) {
+        put(getKey(ent.getMergedNodeTypes()), ent);
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public void put(Key key, EffectiveNodeType ent) {
+        aggregates.put(key, ent);
+        sortedKeys.add(key);
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public Key findBest(Key key) {
+        // quick check for already cached key
+        if (contains(key)) {
+            return key;
+        }
+        Iterator iter = sortedKeys.iterator();
+        while (iter.hasNext()) {
+            Key k = (Key) iter.next();
+            if (key.contains(k)) {
+                return k;
+            }
+        }
+        return null;
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public void invalidate(QName name) {
+        /**
+         * remove all affected effective node types from aggregates cache
+         * (copy keys first to prevent ConcurrentModificationException)
+         */
+        ArrayList keys = new ArrayList(aggregates.keySet());
+        for (Iterator keysIter = keys.iterator(); keysIter.hasNext();) {
+            Key k = (Key) keysIter.next();
+            EffectiveNodeType ent = get(k);
+            if (ent.includesNodeType(name)) {
+                remove(k);
+            }
+        }
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public boolean contains(Key key) {
+        return aggregates.containsKey(key);
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public EffectiveNodeType get(Key key) {
+        return (EffectiveNodeType) aggregates.get(key);
+    }
+
+    /**
+     * Returns the bit number for the given name. If the name does not exist,
+     * a new bit number for that name is created.
+     *
+     * @param name the name to lookup
+     * @return the bit number for the given name
+     */
+    private int getBitNumber(QName name) {
+        Integer i = (Integer) nameIndex.get(name);
+        if (i == null) {
+            synchronized (nameIndex) {
+                i = (Integer) nameIndex.get(name);
+                if (i == null) {
+                    int idx = nameIndex.size();
+                    i = new Integer(idx);
+                    nameIndex.put(name, i);
+                    if (idx >= names.length) {
+                        QName[] newNames = new QName[names.length*2];
+                        System.arraycopy(names, 0, newNames, 0, names.length);
+                        names = newNames;
+                    }
+                    names[idx] = name;
+                }
+            }
+        }
+        return i.intValue();
+    }
+
+    /**
+     * Returns the node type name for a given bit number.
+     * @param n the bit number to lookup
+     * @return the node type name
+     */
+    private QName getName(int n) {
+        return names[n];
+    }
+
+    /**
+     * Removes the effective node type for the given key from the cache.
+     *
+     * @param key the key of the effective node type to remove
+     * @return the removed effective node type or <code>null</code> if it was
+     *         never cached.
+     */
+    private EffectiveNodeType remove(Key key) {
+        EffectiveNodeType removed = (EffectiveNodeType) aggregates.remove(key);
+        if (removed != null) {
+            // unlike the original implementation, the keys in the treeset
+            // compare consistently with the given keys, so we can use the
+            // normal remove method
+            sortedKeys.remove(key);
+        }
+        return removed;
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public Object clone() {
+        BitsetENTCacheImpl clone = new BitsetENTCacheImpl();
+        clone.sortedKeys.addAll(sortedKeys);
+        clone.aggregates.putAll(aggregates);
+        clone.names = new QName[names.length];
+        System.arraycopy(names, 0, clone.names, 0, names.length);
+        clone.nameIndex.putAll(nameIndex);
+        return clone;
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public void dump(PrintStream ps) {
+        ps.println("EffectiveNodeTypeCache (" + this + ")");
+        ps.println();
+        ps.println("EffectiveNodeTypes in cache:");
+        ps.println();
+        Iterator iter = sortedKeys.iterator();
+        while (iter.hasNext()) {
+            Key k = (Key) iter.next();
+            //EffectiveNodeType ent = (EffectiveNodeType) aggregates.get(k);
+            ps.println(k);
+        }
+    }
+
+    /**
+     * Implements a {@link Key} by storing the node type aggregate information
+     * in a bit set. We do not use the {@link java.util.BitSet} because it
+     * does not suit all our needs. Every node type is represented by a bit
+     * in the set. This key is immutable.
+     */
+    private class BitsetKey implements Key {
+
+        /**
+         * The names of the nodetypes that form this key.
+         */
+        private final QName[] names;
+
+        /**
+         * The array of longs that hold the bit information.
+         */
+        private final long[] bits;
+
+        /**
+         * the hashcode, only calculated once
+         */
+        private final int hashCode;
+
+        /**
+         * Creates a new bitset key.
+         * @param names the nodetype names
+         * @param maxBit the approximate number of the greatest bit
+         */
+        public BitsetKey(QName[] names, int maxBit) {
+            this.names = names;
+            bits = new long[maxBit/BPW+1];
+
+            for (int i=0; i<names.length; i++) {
+                int n = getBitNumber(names[i]);
+                bits[n/BPW] |= OR_MASK[n%BPW];
+            }
+            hashCode = calcHashCode();
+        }
+
+        /**
+         * Creates a new bitset key.
+         * @param bits the array of bits
+         * @param numBits the number of bits that are '1' in the given bits
+         */
+        private BitsetKey(long[] bits, int numBits) {
+            this.bits = bits;
+            names = new QName[numBits];
+            int i = nextSetBit(0);
+            int j=0;
+            while (i >= 0) {
+                names[j++] = BitsetENTCacheImpl.this.getName(i);
+                i = nextSetBit(i+1);
+            }
+            hashCode = calcHashCode();
+        }
+
+        /**
+         * {@inheritDoc}
+         */
+        public QName[] getNames() {
+            return names;
+        }
+
+        /**
+         * {@inheritDoc}
+         */
+        public boolean contains(Key otherKey) {
+            /*
+             * 0 - 0 => 0
+             * 0 - 1 => 1
+             * 1 - 0 => 0
+             * 1 - 1 => 0
+             * !a and b
+             */
+            BitsetKey other = (BitsetKey) otherKey;
+            int len = Math.max(bits.length, other.bits.length);
+            for (int i=0; i<len; i++) {
+                long w1 = i < bits.length ? bits[i] : 0;
+                long w2 = i < other.bits.length ? other.bits[i] : 0;
+                long r = ~w1 & w2;
+                if (r != 0) {
+                    return false;
+                }
+            }
+            return true;
+        }
+
+        /**
+         * {@inheritDoc}
+         */
+        public Key subtract(Key otherKey) {
+            /*
+             * 0 - 0 => 0
+             * 0 - 1 => 0
+             * 1 - 0 => 1
+             * 1 - 1 => 0
+             * a and !b
+             */
+            BitsetKey other = (BitsetKey) otherKey;
+            int len = Math.max(bits.length, other.bits.length);
+            long[] newBits = new long[len];
+            int numBits = 0;
+            for (int i=0; i<len; i++) {
+                long w1 = i < bits.length ? bits[i] : 0;
+                long w2 = i < other.bits.length ? other.bits[i] : 0;
+                newBits[i] = w1 & ~w2;
+                numBits += bitCount(newBits[i]);
+            }
+            return new BitsetKey(newBits, numBits);
+        }
+
+        /**
+         * Returns the bit number of the next bit that is set, starting at
+         * <code>fromIndex</code>, inclusive.
+         *
+         * @param fromIndex the bit position to start the search
+         * @return the bit position of the bit or -1 if none found.
+         */
+        private int nextSetBit(int fromIndex) {
+            int addr = fromIndex/BPW;
+            int off = fromIndex%BPW;
+            while (addr < bits.length) {
+                if (bits[addr] != 0) {
+                    while (off < BPW) {
+                        if ((bits[addr] & OR_MASK[off]) != 0) {
+                            return addr * BPW + off;
+                        }
+                        off++;
+                    }
+                    off=0;
+                }
+                addr++;
+            }
+            return -1;
+        }
+
+         /**
+          * Returns the number of bits set in val.
+          * For a derivation of this algorithm, see
+          * "Algorithms and data structures with applications to
+          *  graphics and geometry", by Jurg Nievergelt and Klaus Hinrichs,
+          *  Prentice Hall, 1993.
+          *
+          * @param val the value to calculate the bit count for
+          * @return the number of '1' bits in the value
+          */
+         private int bitCount(long val) {
+             val -= (val & 0xaaaaaaaaaaaaaaaaL) >>> 1;
+             val =  (val & 0x3333333333333333L) + ((val >>> 2) & 0x3333333333333333L);
+             val =  (val + (val >>> 4)) & 0x0f0f0f0f0f0f0f0fL;
+             val += val >>> 8;
+             val += val >>> 16;
+             return ((int)(val) + (int)(val >>> 32)) & 0xff;
+         }
+
+
+        /**
+         * {@inheritDoc}
+         *
+         * This compares 1. the cardinality (number of set bits) and 2. the
+         * numeric value of the bit sets, in descending order.
+         */
+        public int compareTo(Object other) {
+            BitsetKey o = (BitsetKey) other;
+            int res = o.names.length - names.length;
+            if (res == 0) {
+                int adr = Math.max(bits.length, o.bits.length) - 1;
+                while (adr >= 0) {
+                    long w1 = adr<bits.length ? bits[adr] : 0;
+                    long w2 = adr<o.bits.length ? o.bits[adr] : 0;
+                    if (w1 != w2) {
+                        // some signed arithmetic here
+                        long h1 = w1 >>> 32;
+                        long h2 = w2 >>> 32;
+                        if (h1 == h2) {
+                            h1 = w1 & 0x0ffffL;
+                            h2 = w2 & 0x0ffffL;
+                        }
+                        return (int) (h2-h1);
+                    }
+                    adr--;
+                }
+            }
+            return res;
+        }
+
+        /**
+         * {@inheritDoc}
+         */
+        public boolean equals(Object obj) {
+            if (this == obj) {
+                return true;
+            }
+            if (obj instanceof BitsetKey) {
+                BitsetKey o = (BitsetKey) obj;
+                if (names.length != o.names.length) {
+                    return false;
+                }
+                int adr = Math.max(bits.length, o.bits.length) - 1;
+                while (adr >= 0) {
+                    long w1 = adr<bits.length ? bits[adr] : 0;
+                    long w2 = adr<o.bits.length ? o.bits[adr] : 0;
+                    if (w1 != w2) {
+                        return false;
+                    }
+                    adr--;
+                }
+                return true;
+            }
+            return false;
+        }
+
+        /**
+         * {@inheritDoc}
+         */
+        public int hashCode() {
+            return hashCode;
+        }
+
+        /**
+         * Calculates the hashcode.
+         * @return the calculated hashcode
+         */
+        private int calcHashCode() {
+            long h = 1234;
+            int addr = bits.length -1;
+            while (addr >=0 && bits[addr] == 0) {
+                addr--;
+            }
+            while (addr >=0) {
+                h ^= bits[addr] * (addr + 1);
+                addr--;
+            }
+            return (int)((h >> 32) ^ h);
+        }
+
+        /**
+         * {@inheritDoc}
+         */
+        public String toString() {
+            StringBuffer buf = new StringBuffer("w=");
+            buf.append(names.length);
+            int i = nextSetBit(0);
+            while (i>=0) {
+                buf.append(", ").append(i).append("=");
+                buf.append(BitsetENTCacheImpl.this.getName(i));
+                i = nextSetBit(i+1);
+            }
+            return buf.toString();
+        }
+
+    }
+}
\ No newline at end of file

Propchange: jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/BitsetENTCacheImpl.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/BitsetENTCacheImpl.java
------------------------------------------------------------------------------
    svn:keywords = author date id revision url rev
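
For reference, the set algebra in BitsetKey.contains() and BitsetKey.subtract() above operates word by word on the long[] bit sets, where every registered node type name is assigned a bit number via getBitNumber(). A small standalone illustration (plain Java, made-up bit values, not part of the commit):

    // bit i is set if the node type with bit number i is part of the aggregate
    long thisWord  = 0x0BL;                            // bits 0, 1 and 3 set
    long otherWord = 0x03L;                            // bits 0 and 1 set
    boolean contains = (~thisWord & otherWord) == 0;   // true: no bit of 'other' is missing
    long difference  = thisWord & ~otherWord;          // 0x08: only bit 3 remains
    // contains() returns false as soon as (~w1 & w2) != 0 for any word,
    // and subtract() builds a new key from the per-word differences w1 & ~w2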

Modified: jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/EffectiveNodeType.java
URL: http://svn.apache.org/viewvc/jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/EffectiveNodeType.java?view=diff&rev=502223&r1=502222&r2=502223
==============================================================================
--- jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/EffectiveNodeType.java (original)
+++ jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/EffectiveNodeType.java Thu Feb  1 06:26:33 2007
@@ -199,7 +199,9 @@
         // resolve supertypes recursively
         QName[] supertypes = ntd.getSupertypes();
         if (supertypes.length > 0) {
-            ent.internalMerge(NodeTypeRegistry.getEffectiveNodeType(supertypes, entCache, ntdCache), true);
+            EffectiveNodeType base =
+                    NodeTypeRegistry.getEffectiveNodeType(supertypes, entCache, ntdCache);
+            ent.internalMerge(base, true);
         }
 
         // we're done

Modified: jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/EffectiveNodeTypeCache.java
URL: http://svn.apache.org/viewvc/jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/EffectiveNodeTypeCache.java?view=diff&rev=502223&r1=502222&r2=502223
==============================================================================
--- jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/EffectiveNodeTypeCache.java (original)
+++ jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/EffectiveNodeTypeCache.java Thu Feb  1 06:26:33 2007
@@ -19,297 +19,106 @@
 import org.apache.jackrabbit.core.util.Dumpable;
 import org.apache.jackrabbit.name.QName;
 
-import java.io.PrintStream;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.Set;
-import java.util.TreeSet;
-
 /**
- * <code>EffectiveNodeTypeCache</code> ...
+ * <code>EffectiveNodeTypeCache</code> defines the interface for a cache of
+ * effective node types. Effective node types are addressed by {@link Key}s.
  */
-class EffectiveNodeTypeCache implements Cloneable, Dumpable {
+public interface EffectiveNodeTypeCache extends Cloneable, Dumpable {
+
     /**
-     * ordered set of keys
+     * Puts an effective node type into the cache. The key is internally generated
+     * from the set of merged nodetypes.
+     * @param ent the effective node type to put to the cache
      */
-    private final TreeSet sortedKeys;
+    void put(EffectiveNodeType ent);
 
     /**
-     * cache of pre-built aggregations of node types
+     * Puts an effective node type into the cache for the given key.
+     * @param key the key for the effective node type
+     * @param ent the effective node type to put to the cache
      */
-    private final HashMap aggregates;
-
-    EffectiveNodeTypeCache() {
-        sortedKeys = new TreeSet();
-        aggregates = new HashMap();
-    }
-
-    void put(EffectiveNodeType ent) {
-        // we define the weight as the total number of included node types
-        // (through aggregation and inheritance)
-        int weight = ent.getAllNodeTypes().length;
-        // the effective node type is identified by the list of merged
-        // (i.e. aggregated) node types
-        WeightedKey k = new WeightedKey(ent.getMergedNodeTypes(), weight);
-        aggregates.put(k, ent);
-        sortedKeys.add(k);
-    }
-
-    boolean contains(QName[] ntNames) {
-        return aggregates.containsKey(new WeightedKey(ntNames));
-    }
-
-    boolean contains(WeightedKey key) {
-        return aggregates.containsKey(key);
-    }
-
-    EffectiveNodeType get(QName[] ntNames) {
-        return (EffectiveNodeType) aggregates.get(new WeightedKey(ntNames));
-    }
-
-    EffectiveNodeType get(WeightedKey key) {
-        return (EffectiveNodeType) aggregates.get(key);
-    }
-
-    EffectiveNodeType remove(QName[] ntNames) {
-        return remove(new WeightedKey(ntNames));
-    }
-
-    EffectiveNodeType remove(WeightedKey key) {
-        EffectiveNodeType removed = (EffectiveNodeType) aggregates.remove(key);
-        if (removed != null) {
-            // remove index entry
-
-            // FIXME: can't simply call TreeSet.remove(key) because the entry
-            // in sortedKeys might have a different weight and would thus
-            // not be found
-            Iterator iter = sortedKeys.iterator();
-            while (iter.hasNext()) {
-                WeightedKey k = (WeightedKey) iter.next();
-                // WeightedKey.equals(Object) ignores the weight
-                if (key.equals(k)) {
-                    sortedKeys.remove(k);
-                    break;
-                }
-            }
-        }
-        return removed;
-    }
+    void put(Key key, EffectiveNodeType ent);
 
     /**
-     * Returns an iterator over the keys. The order of the returned keys is:
-     * <ol>
-     * <li>descending weight</li>
-     * <li>ascending key (i.e. unique identifier of aggregate)</li>
-     * </ol>
-     *
-     * @see WeightedKey#compareTo
+     * Checks if the effective node type for the given key exists.
+     * @param key the key to check
+     * @return <code>true</code> if the effective node type is cached;
+     *         <code>false</code> otherwise.
      */
-    Iterator keyIterator() {
-        return sortedKeys.iterator();
-    }
+    boolean contains(Key key);
 
     /**
-     * Returns the set of keys.
-     *
-     * @return the set of keys.
+     * Returns the effective node type for the given key or <code>null</code> if
+     * the desired node type is not cached.
+     * @param key the key for the effective node type.
+     * @return the effective node type or <code>null</code>
      */
-    Set keySet() {
-        return Collections.unmodifiableSet(sortedKeys);
-    }
-
-    //-------------------------------------------< java.lang.Object overrides >
-    public Object clone() {
-        EffectiveNodeTypeCache clone = new EffectiveNodeTypeCache();
-        clone.sortedKeys.addAll(sortedKeys);
-        clone.aggregates.putAll(aggregates);
-        return clone;
-    }
+    EffectiveNodeType get(Key key);
 
-    //-------------------------------------------------------------< Dumpable >
     /**
-     * {@inheritDoc}
+     * Returns a key for an effective node type that consists of the given
+     * node type names.
+     * @param ntNames the array of node type names for the effective node type
+     * @return the key to an effective node type.
      */
-    public void dump(PrintStream ps) {
-        ps.println("EffectiveNodeTypeCache (" + this + ")");
-        ps.println();
-        ps.println("EffectiveNodeTypes in cache:");
-        ps.println();
-        Iterator iter = sortedKeys.iterator();
-        while (iter.hasNext()) {
-            WeightedKey k = (WeightedKey) iter.next();
-            //EffectiveNodeType ent = (EffectiveNodeType) aggregates.get(k);
-            ps.println(k);
-        }
-    }
+    Key getKey(QName[] ntNames);
 
-    //--------------------------------------------------------< inner classes >
     /**
-     * A <code>WeightedKey</code> uniquely identifies
-     * a combination (i.e. an aggregation) of one or more node types.
-     * The weight is an indicator for the cost involved in building such an
-     * aggregate (e.g. an aggregation of multiple complex node types with deep
-     * inheritance trees is more costly to build/validate than an agreggation
-     * of two very simple node types with just one property definition each).
-     * <p/>
-     * A very simple (and not very accurate) approximation of the weight would
-     * be the number of explicitly aggregated node types (ignoring inheritance
-     * and complexity of each involved node type). A better approximation would
-     * be the number of <b>all</b>, explicitly and implicitly (note that
-     * inheritance is also an aggregation) aggregated node types.
-     * <p/>
-     * The more accurate the weight definition, the more efficient is the
-     * the building of new aggregates.
-     * <p/>
-     * It is important to note that the weight is not part of the key value,
-     * i.e. it is not considered by the <code>hashCode()</code> and
-     * <code>equals(Object)</code> methods. It does however affect the order
-     * of <code>WeightedKey</code> instances. See
-     * <code>{@link #compareTo(Object)}</code> for more information.
-     * <p/>
-     * Let's assume we have an aggregation of node types named "b", "a" and "c".
-     * Its key would be "[a, b, c]" and the weight 3 (using the simple
-     * approximation).
+     * Removes all effective node types that are aggregated with the node type
+     * of the given name.
+     * @param name the name of the node type.
      */
-    static class WeightedKey implements Comparable {
-
-        /**
-         * array of node type names, sorted in ascending order
-         */
-        private final QName[] names;
-
-        /**
-         * the weight of this key
-         */
-        private final int weight;
-
-        /**
-         * @param ntNames
-         */
-        WeightedKey(QName[] ntNames) {
-            this(ntNames, ntNames.length);
-        }
+    void invalidate(QName name);
 
-        /**
-         * @param ntNames
-         * @param weight
-         */
-        WeightedKey(QName[] ntNames, int weight) {
-            this.weight = weight;
-            names = new QName[ntNames.length];
-            System.arraycopy(ntNames, 0, names, 0, names.length);
-            Arrays.sort(names);
-        }
+    /**
+     * {@inheritDoc}
+     */
+    Object clone();
 
-        /**
-         * @param ntNames
-         */
-        WeightedKey(Collection ntNames) {
-            this(ntNames, ntNames.size());
-        }
+    /**
+     * Searches for the best key <code>k</code> for which the given
+     * <code>key</code> is a superset, i.e. for which {@link Key#contains(Key)}
+     * returns <code>true</code>. If an effective node type for the given key
+     * is already cached, the key itself is returned.
+     *
+     * @param key the key for which the subkey is to be searched
+     * @return the best key or <code>null</code> if no key could be found.
+     */
+    Key findBest(Key key);
 
-        /**
-         * @param ntNames
-         * @param weight
-         */
-        WeightedKey(Collection ntNames, int weight) {
-            this((QName[]) ntNames.toArray(new QName[ntNames.size()]), weight);
-        }
+    /**
+    * A <code>Key</code> uniquely identifies
+    * a combination (i.e. an aggregation) of one or more node types.
+    */
+    interface Key extends Comparable {
 
         /**
-         * @return the weight of this key
+         * Returns the node type names of this key.
+         * @return the node type names of this key.
          */
-        int getWeight() {
-            return weight;
-        }
+        QName[] getNames();
 
         /**
-         * @return the node type names of this key
+         * Checks if the <code>otherKey</code> is contained in this one, i.e. if
+         * this key contains all node type names of the other key.
+         * @param otherKey the other key to check
+         * @return <code>true</code> if this key contains the other key;
+         *         <code>false</code> otherwise.
          */
-        QName[] getNames() {
-            return names;
-        }
-
-        boolean contains(WeightedKey otherKey) {
-            Set tmp = new HashSet(Arrays.asList(names));
-            for (int i = 0; i < otherKey.names.length; i++) {
-                if (!tmp.contains(otherKey.names[i])) {
-                    return false;
-                }
-            }
-            return true;
-        }
-
-        WeightedKey subtract(WeightedKey otherKey) {
-            Set tmp = new HashSet(Arrays.asList(names));
-            tmp.removeAll(Arrays.asList(otherKey.names));
-            return new WeightedKey(tmp);
-
-        }
+        boolean contains(Key otherKey);
 
-        //-------------------------------------------------------< Comparable >
         /**
-         * The resulting sort-order is: 1. descending weight, 2. ascending key
-         * (i.e. string representation of this sorted set).
+         * Creates a new key as the result of a subtract operation, i.e. removes all
+         * node type names that occur in the other key.
+         * <p/>
+         * Please note that no exception is thrown if the other key has node type
+         * names that are not contained in this key (i.e. {@link #contains(Key)}
+         * returns <code>false</code>).
          *
-         * @param o
-         * @return the result of the comparison
+         * @param otherKey the other key to subtract
+         * @return the new key of the subtraction operation.
          */
-        public int compareTo(Object o) {
-            WeightedKey other = (WeightedKey) o;
+        Key subtract(Key otherKey);
 
-            // compare weights
-            if (weight > other.weight) {
-                return -1;
-            } else if (weight < other.weight) {
-                return 1;
-            }
-
-            // compare arrays of names
-            int len1 = names.length;
-            int len2 = other.names.length;
-            int len = Math.min(len1, len2);
-
-            for (int i = 0; i < len; i++) {
-                QName name1 = names[i];
-                QName name2 = other.names[i];
-                int result = name1.compareTo(name2);
-                if (result != 0) {
-                    return result;
-                }
-            }
-            return len1 - len2;
-        }
-
-        //---------------------------------------< java.lang.Object overrides >
-        public int hashCode() {
-            int h = 17;
-            // ignore weight
-            for (int i = 0; i < names.length; i++) {
-                h *= 37;
-                h += names[i].hashCode();
-            }
-            return h;
-        }
-
-        public boolean equals(Object obj) {
-            if (this == obj) {
-                return true;
-            }
-            if (obj instanceof WeightedKey) {
-                WeightedKey other = (WeightedKey) obj;
-                // ignore weight
-                return Arrays.equals(names, other.names);
-            }
-            return false;
-        }
-
-        public String toString() {
-            return Arrays.asList(names).toString() + " (" + weight + ")";
-        }
     }
 }
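
To make the Key contract above concrete, a hedged illustration with made-up node type names (it assumes the code runs inside the org.apache.jackrabbit.core.nodetype package, where the cache implementations added in this commit are visible):

    QName a = new QName("http://example.com/ns", "a");
    QName b = new QName("http://example.com/ns", "b");
    QName c = new QName("http://example.com/ns", "c");
    EffectiveNodeTypeCache cache = new BitsetENTCacheImpl();
    EffectiveNodeTypeCache.Key abc = cache.getKey(new QName[]{a, b, c});
    EffectiveNodeTypeCache.Key ab  = cache.getKey(new QName[]{a, b});
    // abc.contains(ab) is true : abc has all names of ab
    // ab.contains(abc) is false: ab lacks the name c
    // abc.subtract(ab) yields the key for the single name c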

Added: jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/EffectiveNodeTypeCacheImpl.java
URL: http://svn.apache.org/viewvc/jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/EffectiveNodeTypeCacheImpl.java?view=auto&rev=502223
==============================================================================
--- jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/EffectiveNodeTypeCacheImpl.java (added)
+++ jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/EffectiveNodeTypeCacheImpl.java Thu Feb  1 06:26:33 2007
@@ -0,0 +1,366 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.core.nodetype;
+
+import org.apache.jackrabbit.name.QName;
+
+import java.io.PrintStream;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Set;
+import java.util.TreeSet;
+import java.util.ArrayList;
+
+/**
+ * <code>EffectiveNodeTypeCache</code> implementation that uses an array of
+ * node type names as key for caching the effective node types.
+ */
+public class EffectiveNodeTypeCacheImpl implements EffectiveNodeTypeCache {
+
+    /**
+     * ordered set of keys
+     */
+    private final TreeSet sortedKeys;
+
+    /**
+     * cache of pre-built aggregations of node types
+     */
+    private final HashMap aggregates;
+
+    /**
+     * Creates a new effective node type cache.
+     */
+    EffectiveNodeTypeCacheImpl() {
+        sortedKeys = new TreeSet();
+        aggregates = new HashMap();
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public Key getKey(QName[] ntNames) {
+        return new WeightedKey(ntNames);
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public void put(EffectiveNodeType ent) {
+        // we define the weight as the number of merged
+        // (i.e. explicitly aggregated) node types
+        int weight = ent.getMergedNodeTypes().length;
+        // the effective node type is identified by the list of merged
+        // (i.e. aggregated) node types
+        WeightedKey k = new WeightedKey(ent.getMergedNodeTypes(), weight);
+        put(k, ent);
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public void put(Key key, EffectiveNodeType ent) {
+        aggregates.put(key, ent);
+        sortedKeys.add(key);
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public boolean contains(Key key) {
+        return aggregates.containsKey(key);
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public EffectiveNodeType get(Key key) {
+        return (EffectiveNodeType) aggregates.get(key);
+    }
+
+    /**
+     * Removes the effective node type for the given key from the cache.
+     * @param key the key of the effective node type to remove
+     * @return the removed effective node type or <code>null</code> if it was
+     *         never cached.
+     */
+    private EffectiveNodeType remove(Key key) {
+        EffectiveNodeType removed = (EffectiveNodeType) aggregates.remove(key);
+        if (removed != null) {
+            // remove index entry
+
+            // FIXME: can't simply call TreeSet.remove(key) because the entry
+            // in sortedKeys might have a different weight and would thus
+            // not be found
+            Iterator iter = sortedKeys.iterator();
+            while (iter.hasNext()) {
+                Key k = (Key) iter.next();
+                // WeightedKey.equals(Object) ignores the weight
+                if (key.equals(k)) {
+                    sortedKeys.remove(k);
+                    break;
+                }
+            }
+        }
+        return removed;
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public void invalidate(QName name) {
+        // remove all affected effective node types from aggregates cache
+        // (copy keys first to prevent ConcurrentModificationException)
+        ArrayList keys = new ArrayList(sortedKeys);
+        for (Iterator keysIter = keys.iterator(); keysIter.hasNext();) {
+            Key k = (Key) keysIter.next();
+            EffectiveNodeType ent = get(k);
+            if (ent.includesNodeType(name)) {
+                remove(k);
+            }
+        }
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public Key findBest(Key key) {
+        // quick check for already cached key
+        if (contains(key)) {
+            return key;
+        }
+        Iterator iter = sortedKeys.iterator();
+        while (iter.hasNext()) {
+            Key k = (Key) iter.next();
+            // check if the existing aggregate is a 'subset' of the one we're
+            // looking for
+            if (key.contains(k)) {
+                return k;
+            }
+        }
+        return null;
+    }
+
+    //-------------------------------------------< java.lang.Object overrides >
+
+    /**
+     * {@inheritDoc}
+     */
+    public Object clone() {
+        EffectiveNodeTypeCacheImpl clone = new EffectiveNodeTypeCacheImpl();
+        clone.sortedKeys.addAll(sortedKeys);
+        clone.aggregates.putAll(aggregates);
+        return clone;
+    }
+
+    //-------------------------------------------------------------< Dumpable >
+
+    /**
+     * {@inheritDoc}
+     */
+    public void dump(PrintStream ps) {
+        ps.println("EffectiveNodeTypeCache (" + this + ")");
+        ps.println();
+        ps.println("EffectiveNodeTypes in cache:");
+        ps.println();
+        Iterator iter = sortedKeys.iterator();
+        while (iter.hasNext()) {
+            Key k = (Key) iter.next();
+            //EffectiveNodeType ent = (EffectiveNodeType) aggregates.get(k);
+            ps.println(k);
+        }
+    }
+
+    //--------------------------------------------------------< inner classes >
+    /**
+     * A <code>WeightedKey</code> uniquely identifies
+     * a combination (i.e. an aggregation) of one or more node types.
+     * The weight is an indicator for the cost involved in building such an
+     * aggregate (e.g. an aggregation of multiple complex node types with deep
+     * inheritance trees is more costly to build/validate than an aggregation
+     * of two very simple node types with just one property definition each).
+     * <p/>
+     * A very simple (and not very accurate) approximation of the weight would
+     * be the number of explicitly aggregated node types (ignoring inheritance
+     * and complexity of each involved node type). A better approximation would
+     * be the number of <b>all</b>, explicitly and implicitly (note that
+     * inheritance is also an aggregation) aggregated node types.
+     * <p/>
+     * The more accurate the weight definition, the more efficient the
+     * building of new aggregates.
+     * <p/>
+     * It is important to note that the weight is not part of the key value,
+     * i.e. it is not considered by the <code>hashCode()</code> and
+     * <code>equals(Object)</code> methods. It does however affect the order
+     * of <code>WeightedKey</code> instances. See
+     * <code>{@link #compareTo(Object)}</code> for more information.
+     * <p/>
+     * Let's assume we have an aggregation of node types named "b", "a" and "c".
+     * Its key would be "[a, b, c]" and the weight 3 (using the simple
+     * approximation).
+     */
+    private static class WeightedKey implements Key {
+
+        /**
+         * array of node type names, sorted in ascending order
+         */
+        private final QName[] names;
+
+        /**
+         * the weight of this key
+         */
+        private final int weight;
+
+        /**
+         * @param ntNames the node type names of the aggregate
+         */
+        WeightedKey(QName[] ntNames) {
+            this(ntNames, ntNames.length);
+        }
+
+        /**
+         * @param ntNames the node type names of the aggregate
+         * @param weight the weight of the key
+         */
+        WeightedKey(QName[] ntNames, int weight) {
+            this.weight = weight;
+            names = new QName[ntNames.length];
+            System.arraycopy(ntNames, 0, names, 0, names.length);
+            Arrays.sort(names);
+        }
+
+        /**
+         * @param ntNames the collection of node type names
+         */
+        WeightedKey(Collection ntNames) {
+            this(ntNames, ntNames.size());
+        }
+
+        /**
+         * @param ntNames the collection of node type names
+         * @param weight the weight of the key
+         */
+        WeightedKey(Collection ntNames, int weight) {
+            this((QName[]) ntNames.toArray(new QName[ntNames.size()]), weight);
+        }
+
+        /**
+         * @return the node type names of this key
+         */
+        public QName[] getNames() {
+            return names;
+        }
+
+        /**
+         * {@inheritDoc}
+         */
+        public boolean contains(Key otherKey) {
+            WeightedKey key = (WeightedKey) otherKey;
+            Set tmp = new HashSet(Arrays.asList(names));
+            for (int i = 0; i < key.names.length; i++) {
+                if (!tmp.contains(key.names[i])) {
+                    return false;
+                }
+            }
+            return true;
+        }
+
+        /**
+         * {@inheritDoc}
+         */
+        public Key subtract(Key otherKey) {
+            WeightedKey key = (WeightedKey) otherKey;
+            Set tmp = new HashSet(Arrays.asList(names));
+            tmp.removeAll(Arrays.asList(key.names));
+            return new WeightedKey(tmp);
+
+        }
+
+        //-------------------------------------------------------< Comparable >
+        /**
+         * The resulting sort-order is: 1. descending weight, 2. ascending key
+         * (i.e. string representation of this sorted set).
+         *
+         * @param o the other key to compare
+         * @return the result of the comparison
+         */
+        public int compareTo(Object o) {
+            WeightedKey other = (WeightedKey) o;
+
+            // compare weights
+            if (weight > other.weight) {
+                return -1;
+            } else if (weight < other.weight) {
+                return 1;
+            }
+
+            // compare arrays of names
+            int len1 = names.length;
+            int len2 = other.names.length;
+            int len = Math.min(len1, len2);
+
+            for (int i = 0; i < len; i++) {
+                QName name1 = names[i];
+                QName name2 = other.names[i];
+                int result = name1.compareTo(name2);
+                if (result != 0) {
+                    return result;
+                }
+            }
+            return len1 - len2;
+        }
+
+        //---------------------------------------< java.lang.Object overrides >
+
+        /**
+         * {@inheritDoc}
+         */
+        public int hashCode() {
+            int h = 17;
+            // ignore weight
+            for (int i = 0; i < names.length; i++) {
+                h *= 37;
+                h += names[i].hashCode();
+            }
+            return h;
+        }
+
+        /**
+         * {@inheritDoc}
+         */
+        public boolean equals(Object obj) {
+            if (this == obj) {
+                return true;
+            }
+            if (obj instanceof WeightedKey) {
+                WeightedKey other = (WeightedKey) obj;
+                // ignore weight
+                return Arrays.equals(names, other.names);
+            }
+            return false;
+        }
+
+        /**
+         * {@inheritDoc}
+         */
+        public String toString() {
+            return Arrays.asList(names).toString() + " (" + weight + ")";
+        }
+    }
+}
\ No newline at end of file

Propchange: jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/EffectiveNodeTypeCacheImpl.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/EffectiveNodeTypeCacheImpl.java
------------------------------------------------------------------------------
    svn:keywords = author date id revision url rev
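
The ordering of WeightedKey above is what makes findBest() prefer larger cached aggregates: compareTo() sorts by descending weight first, so the TreeSet of keys is walked from the heaviest (most expensive to rebuild) aggregate down. A hedged sketch with made-up names, again assuming the org.apache.jackrabbit.core.nodetype package:

    QName a = new QName("http://example.com/ns", "a");
    QName b = new QName("http://example.com/ns", "b");
    EffectiveNodeTypeCache cache = new EffectiveNodeTypeCacheImpl();
    EffectiveNodeTypeCache.Key ab = cache.getKey(new QName[]{a, b});   // weight 2
    EffectiveNodeTypeCache.Key a1 = cache.getKey(new QName[]{a});      // weight 1
    // ab.compareTo(a1) < 0: heavier keys sort first, so findBest() checks the
    // two-name aggregate before the one-name aggregate once both are cached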

Modified: jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/NodeTypeRegistry.java
URL: http://svn.apache.org/viewvc/jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/NodeTypeRegistry.java?view=diff&rev=502223&r1=502222&r2=502223
==============================================================================
--- jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/NodeTypeRegistry.java (original)
+++ jackrabbit/trunk/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/nodetype/NodeTypeRegistry.java Thu Feb  1 06:26:33 2007
@@ -16,29 +16,23 @@
  */
 package org.apache.jackrabbit.core.nodetype;
 
+import EDU.oswego.cs.dl.util.concurrent.ConcurrentReaderHashMap;
 import org.apache.commons.collections.map.ReferenceMap;
+import org.apache.jackrabbit.core.cluster.NodeTypeEventChannel;
+import org.apache.jackrabbit.core.cluster.NodeTypeEventListener;
 import org.apache.jackrabbit.core.fs.FileSystem;
 import org.apache.jackrabbit.core.fs.FileSystemException;
 import org.apache.jackrabbit.core.fs.FileSystemResource;
 import org.apache.jackrabbit.core.util.Dumpable;
 import org.apache.jackrabbit.core.value.InternalValue;
-import org.apache.jackrabbit.core.cluster.NodeTypeEventChannel;
-import org.apache.jackrabbit.core.cluster.NodeTypeEventListener;
 import org.apache.jackrabbit.name.QName;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.jcr.NamespaceRegistry;
-import javax.jcr.PropertyType;
-import javax.jcr.RepositoryException;
-import javax.jcr.nodetype.ConstraintViolationException;
-import javax.jcr.nodetype.NoSuchNodeTypeException;
-import javax.jcr.version.OnParentVersionAction;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.io.PrintStream;
-import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
@@ -48,7 +42,12 @@
 import java.util.Set;
 import java.util.Stack;
 
-import EDU.oswego.cs.dl.util.concurrent.ConcurrentReaderHashMap;
+import javax.jcr.NamespaceRegistry;
+import javax.jcr.PropertyType;
+import javax.jcr.RepositoryException;
+import javax.jcr.nodetype.ConstraintViolationException;
+import javax.jcr.nodetype.NoSuchNodeTypeException;
+import javax.jcr.version.OnParentVersionAction;
 
 /**
  * A <code>NodeTypeRegistry</code> ...
@@ -76,7 +75,7 @@
     private final NodeTypeDefStore customNTDefs;
 
     // cache of pre-built aggregations of node types
-    private final EffectiveNodeTypeCache entCache;
+    private EffectiveNodeTypeCache entCache;
 
     // map of node type names and node type definitions
     private final ConcurrentReaderHashMap registeredNTDefs;
@@ -165,8 +164,8 @@
      *
      * @param ntd the definition of the new node type
      * @return an <code>EffectiveNodeType</code> instance
-     * @throws InvalidNodeTypeDefException
-     * @throws RepositoryException
+     * @throws InvalidNodeTypeDefException if the given node type definition is invalid.
+     * @throws RepositoryException if a repository error occurs.
      */
     public synchronized EffectiveNodeType registerNodeType(NodeTypeDef ntd)
             throws InvalidNodeTypeDefException, RepositoryException {
@@ -198,8 +197,8 @@
      * dependencies on each other.
      *
      * @param ntDefs a collection of <code>NodeTypeDef<code> objects
-     * @throws InvalidNodeTypeDefException
-     * @throws RepositoryException
+     * @throws InvalidNodeTypeDefException if the given node type definition is invalid.
+     * @throws RepositoryException if a repository error occurs.
      */
     public synchronized void registerNodeTypes(Collection ntDefs)
             throws InvalidNodeTypeDefException, RepositoryException {
@@ -256,8 +255,7 @@
             dependents.removeAll(ntNames);
             if (dependents.size() > 0) {
                 StringBuffer msg = new StringBuffer();
-                msg.append(ntName
-                        + " can not be removed because the following node types depend on it: ");
+                msg.append(ntName).append(" can not be removed because the following node types depend on it: ");
                 for (Iterator depIter = dependents.iterator(); depIter.hasNext();) {
                     msg.append(depIter.next());
                     msg.append(" ");
@@ -643,7 +641,10 @@
             throw new RepositoryException(error, fse);
         }
 
-        entCache = new EffectiveNodeTypeCache();
+        // use the improved node type cache;
+        // to revert to the old implementation, replace with:
+        // entCache = new EffectiveNodeTypeCacheImpl();
+        entCache = new BitsetENTCacheImpl();
         registeredNTDefs = new ConcurrentReaderHashMap();
         propDefs = new ConcurrentReaderHashMap();
         nodeDefs = new ConcurrentReaderHashMap();
@@ -922,7 +923,8 @@
                                                   Map ntdCache)
             throws NoSuchNodeTypeException {
         // 1. check if effective node type has already been built
-        EffectiveNodeType ent = entCache.get(new QName[]{ntName});
+        EffectiveNodeTypeCache.Key key = entCache.getKey(new QName[]{ntName});
+        EffectiveNodeType ent = entCache.get(key);
         if (ent != null) {
             return ent;
         }
@@ -955,7 +957,7 @@
      * @param ntNames  array of node type names
      * @param entCache cache of already-built effective node types
      * @param ntdCache cache of node type definitions
-     * @return
+     * @return the desired effective node type
      * @throws NodeTypeConflictException if the effective node type representation
      *                                   could not be built due to conflicting
      *                                   node type definitions.
@@ -967,8 +969,7 @@
                                                   Map ntdCache)
             throws NodeTypeConflictException, NoSuchNodeTypeException {
 
-        EffectiveNodeTypeCache.WeightedKey key =
-                new EffectiveNodeTypeCache.WeightedKey(ntNames);
+        EffectiveNodeTypeCache.Key key = entCache.getKey(ntNames);
 
         // 1. check if aggregate has already been built
         if (entCache.contains(key)) {
@@ -983,41 +984,25 @@
         }
 
         // 3. build aggregate
+        EffectiveNodeTypeCache.Key requested = key;
         EffectiveNodeType result = null;
         synchronized (entCache) {
             // build list of 'best' existing sub-aggregates
-            ArrayList tmpResults = new ArrayList();
             while (key.getNames().length > 0) {
-                // check if we've already built this aggregate
-                if (entCache.contains(key)) {
-                    tmpResults.add(entCache.get(key));
-                    // subtract the result from the temporary key
-                    // (which is 'empty' now)
-                    key = key.subtract(key);
-                    break;
-                }
-                /**
-                 * walk list of existing aggregates sorted by 'weight' of
-                 * aggregate (i.e. the cost of building it)
-                 */
-                boolean foundSubResult = false;
-                Iterator iter = entCache.keyIterator();
-                while (iter.hasNext()) {
-                    EffectiveNodeTypeCache.WeightedKey k =
-                            (EffectiveNodeTypeCache.WeightedKey) iter.next();
-                    /**
-                     * check if the existing aggregate is a 'subset' of the one
-                     * we're looking for
-                     */
-                    if (key.contains(k)) {
-                        tmpResults.add(entCache.get(k));
-                        // subtract the result from the temporary key
-                        key = key.subtract(k);
-                        foundSubResult = true;
-                        break;
+                // find the (sub) key that matches the current key the best
+                EffectiveNodeTypeCache.Key subKey = entCache.findBest(key);
+                if (subKey != null) {
+                    EffectiveNodeType ent = entCache.get(subKey);
+                    if (result == null) {
+                        result = ent;
+                    } else {
+                        result = result.merge(ent);
+                        // store intermediate result
+                        entCache.put(result);
                     }
-                }
-                if (!foundSubResult) {
+                    // subtract the result from the temporary key
+                    key = key.subtract(subKey);
+                } else {
                     /**
                      * no matching sub-aggregates found:
                      * build aggregate of remaining node types through iteration
@@ -1037,21 +1022,14 @@
                             entCache.put(result);
                         }
                     }
-                    // add aggregate of remaining node types to result list
-                    tmpResults.add(result);
                     break;
                 }
             }
-            // merge the sub-aggregates into new effective node type
-            for (int i = 0; i < tmpResults.size(); i++) {
-                if (result == null) {
-                    result = (EffectiveNodeType) tmpResults.get(i);
-                } else {
-                    result = result.merge((EffectiveNodeType) tmpResults.get(i));
-                    // store intermediate result
-                    entCache.put(result);
-                }
-            }
+        }
+        // also put the requested key, since the merge could have removed some
+        // of the redundant nodetypes
+        if (!entCache.contains(requested)) {
+            entCache.put(requested, result);
         }
         // we're done
         return result;
@@ -1233,13 +1211,15 @@
             NodeTypeDef ntd = (NodeTypeDef) iter.next();
 
             EffectiveNodeType ent =
-                    validateNodeTypeDef(ntd, tmpENTCache, tmpNTDefCache, nsReg, lenient);
+                    validateNodeTypeDef(ntd, tmpENTCache, tmpNTDefCache, nsReg,
+                            lenient);
 
             // store new effective node type instance
             tmpENTCache.put(ent);
         }
 
-        // since no exception was thrown so far the definitions are assumed to be valid
+        // since no exception was thrown so far the definitions are assumed to
+        // be valid
         for (Iterator iter = ntDefs.iterator(); iter.hasNext();) {
             NodeTypeDef ntd = (NodeTypeDef) iter.next();
 
@@ -1258,13 +1238,7 @@
         }
 
         // finally add newly created effective node types to entCache
-        for (Iterator it = tmpENTCache.keyIterator(); it.hasNext(); ) {
-            EffectiveNodeTypeCache.WeightedKey k =
-                    (EffectiveNodeTypeCache.WeightedKey) it.next();
-            if (!entCache.contains(k)) {
-                entCache.put(tmpENTCache.get(k));
-            }
-        }
+        entCache = tmpENTCache;
     }
 
     private void internalUnregister(QName name) throws NoSuchNodeTypeException {
@@ -1273,19 +1247,7 @@
             throw new NoSuchNodeTypeException(name.toString());
         }
         registeredNTDefs.remove(name);
-        /**
-         * remove all affected effective node types from aggregates cache
-         * (copy keys first to prevent ConcurrentModificationException)
-         */
-        ArrayList keys = new ArrayList(entCache.keySet());
-        for (Iterator keysIter = keys.iterator(); keysIter.hasNext();) {
-            EffectiveNodeTypeCache.WeightedKey k =
-                    (EffectiveNodeTypeCache.WeightedKey) keysIter.next();
-            EffectiveNodeType ent = entCache.get(k);
-            if (ent.includesNodeType(name)) {
-                entCache.remove(k);
-            }
-        }
+        entCache.invalidate(name);
 
         // remove property & child node definitions
         PropDef[] pda = ntd.getPropertyDefs();