You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@directory.apache.org by el...@apache.org on 2010/07/15 12:04:08 UTC
svn commit: r964361 [2/5] - in /directory/shared/trunk:
ldap-schema-manager-tests/
ldap-schema-manager-tests/src/test/java/org/apache/directory/shared/ldap/aci/
ldap-schema-manager-tests/src/test/java/org/apache/directory/shared/ldap/schema/syntaxCheck...
Added: directory/shared/trunk/ldap-schema-manager-tests/src/test/java/org/apache/directory/shared/ldap/aci/ProtectedItem_RestrictedByTest.java
URL: http://svn.apache.org/viewvc/directory/shared/trunk/ldap-schema-manager-tests/src/test/java/org/apache/directory/shared/ldap/aci/ProtectedItem_RestrictedByTest.java?rev=964361&view=auto
==============================================================================
--- directory/shared/trunk/ldap-schema-manager-tests/src/test/java/org/apache/directory/shared/ldap/aci/ProtectedItem_RestrictedByTest.java (added)
+++ directory/shared/trunk/ldap-schema-manager-tests/src/test/java/org/apache/directory/shared/ldap/aci/ProtectedItem_RestrictedByTest.java Thu Jul 15 10:04:06 2010
@@ -0,0 +1,146 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+package org.apache.directory.shared.ldap.aci;
+
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import org.apache.directory.junit.tools.Concurrent;
+import org.apache.directory.junit.tools.ConcurrentJunitRunner;
+import org.apache.directory.shared.ldap.aci.protectedItem.RestrictedByElem;
+import org.apache.directory.shared.ldap.aci.protectedItem.RestrictedByItem;
+import org.apache.directory.shared.ldap.schema.AttributeType;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+
+/**
+ * Unit tests class ProtectedItem.RestrictedBy.
+ *
+ * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
+ */
+@RunWith(ConcurrentJunitRunner.class)
+@Concurrent()
+public class ProtectedItem_RestrictedByTest
+{
+ RestrictedByItem restrictedByA;
+ RestrictedByItem restrictedByACopy;
+ RestrictedByItem restrictedByB;
+ RestrictedByItem restrictedByC;
+
+
+ /**
+ * Initialize name instances
+ */
+ @Before
+ public void initNames() throws Exception
+ {
+ RestrictedByElem rbiA = new RestrictedByElem( new AttributeType( "aa" ), new AttributeType( "aa" ) );
+ RestrictedByElem rbiB = new RestrictedByElem( new AttributeType( "bb" ), new AttributeType( "bb" ) );
+ RestrictedByElem rbiC = new RestrictedByElem( new AttributeType( "cc" ), new AttributeType( "cc" ) );
+ RestrictedByElem rbiD = new RestrictedByElem( new AttributeType( "dd" ), new AttributeType( "dd" ) );
+
+ Set<RestrictedByElem> colA = new HashSet<RestrictedByElem>();
+ colA.add( rbiA );
+ colA.add( rbiB );
+ colA.add( rbiC );
+ Set<RestrictedByElem> colB = new HashSet<RestrictedByElem>();
+ colB.add( rbiA );
+ colB.add( rbiB );
+ colB.add( rbiC );
+ Set<RestrictedByElem> colC = new HashSet<RestrictedByElem>();
+ colC.add( rbiB );
+ colC.add( rbiC );
+ colC.add( rbiD );
+
+ restrictedByA = new RestrictedByItem( colA );
+ restrictedByACopy = new RestrictedByItem( colA );
+ restrictedByB = new RestrictedByItem( colB );
+ restrictedByC = new RestrictedByItem( colC );
+ }
+
+
+ @Test
+ public void testEqualsNull() throws Exception
+ {
+ assertFalse( restrictedByA.equals( null ) );
+ }
+
+
+ @Test
+ public void testEqualsReflexive() throws Exception
+ {
+ assertEquals( restrictedByA, restrictedByA );
+ }
+
+
+ @Test
+ public void testHashCodeReflexive() throws Exception
+ {
+ assertEquals( restrictedByA.hashCode(), restrictedByA.hashCode() );
+ }
+
+
+ @Test
+ public void testEqualsSymmetric() throws Exception
+ {
+ assertEquals( restrictedByA, restrictedByACopy );
+ assertEquals( restrictedByACopy, restrictedByA );
+ }
+
+
+ @Test
+ public void testHashCodeSymmetric() throws Exception
+ {
+ assertEquals( restrictedByA.hashCode(), restrictedByACopy.hashCode() );
+ assertEquals( restrictedByACopy.hashCode(), restrictedByA.hashCode() );
+ }
+
+
+ @Test
+ public void testEqualsTransitive() throws Exception
+ {
+ assertEquals( restrictedByA, restrictedByACopy );
+ assertEquals( restrictedByACopy, restrictedByB );
+ assertEquals( restrictedByA, restrictedByB );
+ }
+
+
+ @Test
+ public void testHashCodeTransitive() throws Exception
+ {
+ assertEquals( restrictedByA.hashCode(), restrictedByACopy.hashCode() );
+ assertEquals( restrictedByACopy.hashCode(), restrictedByB.hashCode() );
+ assertEquals( restrictedByA.hashCode(), restrictedByB.hashCode() );
+ }
+
+
+ @Test
+ public void testNotEqualDiffValue() throws Exception
+ {
+ assertFalse( restrictedByA.equals( restrictedByC ) );
+ assertFalse( restrictedByC.equals( restrictedByA ) );
+ }
+}
Added: directory/shared/trunk/ldap-schema-manager-tests/src/test/java/org/apache/directory/shared/ldap/aci/ProtectedItem_SelfValueTest.java
URL: http://svn.apache.org/viewvc/directory/shared/trunk/ldap-schema-manager-tests/src/test/java/org/apache/directory/shared/ldap/aci/ProtectedItem_SelfValueTest.java?rev=964361&view=auto
==============================================================================
--- directory/shared/trunk/ldap-schema-manager-tests/src/test/java/org/apache/directory/shared/ldap/aci/ProtectedItem_SelfValueTest.java (added)
+++ directory/shared/trunk/ldap-schema-manager-tests/src/test/java/org/apache/directory/shared/ldap/aci/ProtectedItem_SelfValueTest.java Thu Jul 15 10:04:06 2010
@@ -0,0 +1,140 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
package org.apache.directory.shared.ldap.aci;


import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;

import java.util.HashSet;
import java.util.Set;

import org.apache.directory.junit.tools.Concurrent;
import org.apache.directory.junit.tools.ConcurrentJunitRunner;
import org.apache.directory.shared.ldap.aci.protectedItem.SelfValueItem;
import org.apache.directory.shared.ldap.schema.AttributeType;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;


/**
 * Unit tests for the equals()/hashCode() contract of ProtectedItem.SelfValue.
 *
 * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
 */
@RunWith(ConcurrentJunitRunner.class)
@Concurrent()
public class ProtectedItem_SelfValueTest
{
    SelfValueItem selfValueA;
    SelfValueItem selfValueACopy;
    SelfValueItem selfValueB;
    SelfValueItem selfValueC;


    /**
     * Builds the fixture: A, its copy and B all wrap equal attribute type
     * sets, while C wraps a set holding one different attribute type.
     */
    @Before
    public void initNames() throws Exception
    {
        Set<AttributeType> attrsA = new HashSet<AttributeType>();
        attrsA.add( new AttributeType( "aa" ) );
        attrsA.add( new AttributeType( "bb" ) );
        attrsA.add( new AttributeType( "cc" ) );

        Set<AttributeType> attrsB = new HashSet<AttributeType>();
        attrsB.add( new AttributeType( "aa" ) );
        attrsB.add( new AttributeType( "bb" ) );
        attrsB.add( new AttributeType( "cc" ) );

        Set<AttributeType> attrsC = new HashSet<AttributeType>();
        attrsC.add( new AttributeType( "bb" ) );
        attrsC.add( new AttributeType( "cc" ) );
        attrsC.add( new AttributeType( "dd" ) );

        selfValueA = new SelfValueItem( attrsA );
        selfValueACopy = new SelfValueItem( attrsA );
        selfValueB = new SelfValueItem( attrsB );
        selfValueC = new SelfValueItem( attrsC );
    }


    /** equals() must return false for a null argument. */
    @Test
    public void testEqualsNull() throws Exception
    {
        assertFalse( selfValueA.equals( null ) );
    }


    /** equals() must be reflexive. */
    @Test
    public void testEqualsReflexive() throws Exception
    {
        assertEquals( selfValueA, selfValueA );
    }


    /** hashCode() must be stable across calls on the same instance. */
    @Test
    public void testHashCodeReflexive() throws Exception
    {
        assertEquals( selfValueA.hashCode(), selfValueA.hashCode() );
    }


    /** equals() must be symmetric. */
    @Test
    public void testEqualsSymmetric() throws Exception
    {
        assertEquals( selfValueACopy, selfValueA );
        assertEquals( selfValueA, selfValueACopy );
    }


    /** Equal instances must have equal hash codes, in either direction. */
    @Test
    public void testHashCodeSymmetric() throws Exception
    {
        int hashA = selfValueA.hashCode();
        int hashACopy = selfValueACopy.hashCode();

        assertEquals( hashA, hashACopy );
        assertEquals( hashACopy, hashA );
    }


    /** equals() must be transitive: A = ACopy, ACopy = B, hence A = B. */
    @Test
    public void testEqualsTransitive() throws Exception
    {
        assertEquals( selfValueA, selfValueACopy );
        assertEquals( selfValueACopy, selfValueB );
        assertEquals( selfValueA, selfValueB );
    }


    /** Hash codes must agree across the A/ACopy/B chain of equal instances. */
    @Test
    public void testHashCodeTransitive() throws Exception
    {
        int hashA = selfValueA.hashCode();
        int hashACopy = selfValueACopy.hashCode();
        int hashB = selfValueB.hashCode();

        assertEquals( hashA, hashACopy );
        assertEquals( hashACopy, hashB );
        assertEquals( hashA, hashB );
    }


    /** Instances wrapping different attribute type sets must not be equal. */
    @Test
    public void testNotEqualDiffValue() throws Exception
    {
        assertFalse( selfValueA.equals( selfValueC ) );
        assertFalse( selfValueC.equals( selfValueA ) );
    }
}
Added: directory/shared/trunk/ldap-schema-manager-tests/src/test/java/org/apache/directory/shared/ldap/aci/UserClass_NameTest.java
URL: http://svn.apache.org/viewvc/directory/shared/trunk/ldap-schema-manager-tests/src/test/java/org/apache/directory/shared/ldap/aci/UserClass_NameTest.java?rev=964361&view=auto
==============================================================================
--- directory/shared/trunk/ldap-schema-manager-tests/src/test/java/org/apache/directory/shared/ldap/aci/UserClass_NameTest.java (added)
+++ directory/shared/trunk/ldap-schema-manager-tests/src/test/java/org/apache/directory/shared/ldap/aci/UserClass_NameTest.java Thu Jul 15 10:04:06 2010
@@ -0,0 +1,144 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
package org.apache.directory.shared.ldap.aci;


import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;

import java.util.HashSet;
import java.util.Set;

import org.apache.directory.junit.tools.Concurrent;
import org.apache.directory.junit.tools.ConcurrentJunitRunner;
import org.apache.directory.shared.ldap.aci.UserClass.Name;
import org.apache.directory.shared.ldap.exception.LdapInvalidDnException;
import org.apache.directory.shared.ldap.name.DN;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;


/**
 * Unit tests for the equals()/hashCode() contract of UserClass.Name.
 *
 * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
 */
@RunWith(ConcurrentJunitRunner.class)
@Concurrent()
public class UserClass_NameTest
{
    private Name nameA;
    private Name nameACopy;
    private Name nameB;
    private Name nameC;


    /**
     * Builds the fixture: A, its copy and B wrap equal DN sets (the copy's
     * set is populated in a different order on purpose), while C wraps a
     * set holding one different DN.
     */
    @Before
    public void initNames() throws LdapInvalidDnException
    {
        Set<DN> dnsA = new HashSet<DN>();
        dnsA.add( new DN( "a=aa" ) );
        dnsA.add( new DN( "b=bb" ) );

        // Same DNs as dnsA, inserted in reverse order.
        Set<DN> dnsACopy = new HashSet<DN>();
        dnsACopy.add( new DN( "b=bb" ) );
        dnsACopy.add( new DN( "a=aa" ) );

        Set<DN> dnsB = new HashSet<DN>();
        dnsB.add( new DN( "a=aa" ) );
        dnsB.add( new DN( "b=bb" ) );

        Set<DN> dnsC = new HashSet<DN>();
        dnsC.add( new DN( "b=bb" ) );
        dnsC.add( new DN( "c=cc" ) );

        nameA = new Name( dnsA );
        nameACopy = new Name( dnsACopy );
        nameB = new Name( dnsB );
        nameC = new Name( dnsC );
    }


    /** equals() must return false for a null argument. */
    @Test
    public void testEqualsNull() throws Exception
    {
        assertFalse( nameA.equals( null ) );
    }


    /** equals() must be reflexive. */
    @Test
    public void testEqualsReflexive() throws Exception
    {
        assertEquals( nameA, nameA );
    }


    /** hashCode() must be stable across calls on the same instance. */
    @Test
    public void testHashCodeReflexive() throws Exception
    {
        assertEquals( nameA.hashCode(), nameA.hashCode() );
    }


    /** equals() must be symmetric. */
    @Test
    public void testEqualsSymmetric() throws Exception
    {
        assertEquals( nameACopy, nameA );
        assertEquals( nameA, nameACopy );
    }


    /** Equal instances must have equal hash codes, in either direction. */
    @Test
    public void testHashCodeSymmetric() throws Exception
    {
        int hashA = nameA.hashCode();
        int hashACopy = nameACopy.hashCode();

        assertEquals( hashA, hashACopy );
        assertEquals( hashACopy, hashA );
    }


    /** equals() must be transitive: A = ACopy, ACopy = B, hence A = B. */
    @Test
    public void testEqualsTransitive() throws Exception
    {
        assertEquals( nameA, nameACopy );
        assertEquals( nameACopy, nameB );
        assertEquals( nameA, nameB );
    }


    /** Hash codes must agree across the A/ACopy/B chain of equal instances. */
    @Test
    public void testHashCodeTransitive() throws Exception
    {
        int hashA = nameA.hashCode();
        int hashACopy = nameACopy.hashCode();
        int hashB = nameB.hashCode();

        assertEquals( hashA, hashACopy );
        assertEquals( hashACopy, hashB );
        assertEquals( hashA, hashB );
    }


    /** Instances wrapping different DN sets must not be equal. */
    @Test
    public void testNotEqualDiffValue() throws Exception
    {
        assertFalse( nameA.equals( nameC ) );
        assertFalse( nameC.equals( nameA ) );
    }
}
\ No newline at end of file
Added: directory/shared/trunk/ldap-schema-manager-tests/src/test/java/org/apache/directory/shared/ldap/aci/UserClass_SubtreeTest.java
URL: http://svn.apache.org/viewvc/directory/shared/trunk/ldap-schema-manager-tests/src/test/java/org/apache/directory/shared/ldap/aci/UserClass_SubtreeTest.java?rev=964361&view=auto
==============================================================================
--- directory/shared/trunk/ldap-schema-manager-tests/src/test/java/org/apache/directory/shared/ldap/aci/UserClass_SubtreeTest.java (added)
+++ directory/shared/trunk/ldap-schema-manager-tests/src/test/java/org/apache/directory/shared/ldap/aci/UserClass_SubtreeTest.java Thu Jul 15 10:04:06 2010
@@ -0,0 +1,147 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
package org.apache.directory.shared.ldap.aci;


import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;

import java.util.HashSet;
import java.util.Set;

import org.apache.directory.junit.tools.Concurrent;
import org.apache.directory.junit.tools.ConcurrentJunitRunner;
import org.apache.directory.shared.ldap.aci.UserClass.Subtree;
import org.apache.directory.shared.ldap.name.DN;
import org.apache.directory.shared.ldap.subtree.BaseSubtreeSpecification;
import org.apache.directory.shared.ldap.subtree.SubtreeSpecification;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;


/**
 * Unit tests for the equals()/hashCode() contract of UserClass.Subtree.
 *
 * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
 */
@RunWith(ConcurrentJunitRunner.class)
@Concurrent()
public class UserClass_SubtreeTest
{
    Subtree subtreeA;
    Subtree subtreeACopy;
    Subtree subtreeB;
    Subtree subtreeC;


    /**
     * Builds the fixture: A, its copy and B all wrap equal specification
     * sets, while C wraps a set holding one different specification.
     */
    @Before
    public void initNames() throws Exception
    {
        SubtreeSpecification ssA = new BaseSubtreeSpecification();
        SubtreeSpecification ssB = new BaseSubtreeSpecification();
        SubtreeSpecification ssC = new BaseSubtreeSpecification();
        SubtreeSpecification ssD = new BaseSubtreeSpecification( new DN( "cn=dummy" ) );

        Set<SubtreeSpecification> specsA = new HashSet<SubtreeSpecification>();
        specsA.add( ssA );
        specsA.add( ssB );
        specsA.add( ssC );

        Set<SubtreeSpecification> specsB = new HashSet<SubtreeSpecification>();
        specsB.add( ssA );
        specsB.add( ssB );
        specsB.add( ssC );

        Set<SubtreeSpecification> specsC = new HashSet<SubtreeSpecification>();
        specsC.add( ssB );
        specsC.add( ssC );
        specsC.add( ssD );

        subtreeA = new Subtree( specsA );
        subtreeACopy = new Subtree( specsA );
        subtreeB = new Subtree( specsB );
        subtreeC = new Subtree( specsC );
    }


    /** equals() must return false for a null argument. */
    @Test
    public void testEqualsNull() throws Exception
    {
        assertFalse( subtreeA.equals( null ) );
    }


    /** equals() must be reflexive. */
    @Test
    public void testEqualsReflexive() throws Exception
    {
        assertEquals( subtreeA, subtreeA );
    }


    /** hashCode() must be stable across calls on the same instance. */
    @Test
    public void testHashCodeReflexive() throws Exception
    {
        assertEquals( subtreeA.hashCode(), subtreeA.hashCode() );
    }


    /** equals() must be symmetric. */
    @Test
    public void testEqualsSymmetric() throws Exception
    {
        assertEquals( subtreeACopy, subtreeA );
        assertEquals( subtreeA, subtreeACopy );
    }


    /** Equal instances must have equal hash codes, in either direction. */
    @Test
    public void testHashCodeSymmetric() throws Exception
    {
        int hashA = subtreeA.hashCode();
        int hashACopy = subtreeACopy.hashCode();

        assertEquals( hashA, hashACopy );
        assertEquals( hashACopy, hashA );
    }


    /** equals() must be transitive: A = ACopy, ACopy = B, hence A = B. */
    @Test
    public void testEqualsTransitive() throws Exception
    {
        assertEquals( subtreeA, subtreeACopy );
        assertEquals( subtreeACopy, subtreeB );
        assertEquals( subtreeA, subtreeB );
    }


    /** Hash codes must agree across the A/ACopy/B chain of equal instances. */
    @Test
    public void testHashCodeTransitive() throws Exception
    {
        int hashA = subtreeA.hashCode();
        int hashACopy = subtreeACopy.hashCode();
        int hashB = subtreeB.hashCode();

        assertEquals( hashA, hashACopy );
        assertEquals( hashACopy, hashB );
        assertEquals( hashA, hashB );
    }


    /** Instances wrapping different specification sets must not be equal. */
    @Test
    public void testNotEqualDiffValue() throws Exception
    {
        assertFalse( subtreeA.equals( subtreeC ) );
        assertFalse( subtreeC.equals( subtreeA ) );
    }
}
Added: directory/shared/trunk/ldap-schema-manager-tests/src/test/java/org/apache/directory/shared/ldap/subtree/SubtreeSpecificationParserTest.java
URL: http://svn.apache.org/viewvc/directory/shared/trunk/ldap-schema-manager-tests/src/test/java/org/apache/directory/shared/ldap/subtree/SubtreeSpecificationParserTest.java?rev=964361&view=auto
==============================================================================
--- directory/shared/trunk/ldap-schema-manager-tests/src/test/java/org/apache/directory/shared/ldap/subtree/SubtreeSpecificationParserTest.java (added)
+++ directory/shared/trunk/ldap-schema-manager-tests/src/test/java/org/apache/directory/shared/ldap/subtree/SubtreeSpecificationParserTest.java Thu Jul 15 10:04:06 2010
@@ -0,0 +1,486 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.directory.shared.ldap.subtree;
+
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.directory.junit.tools.Concurrent;
+import org.apache.directory.junit.tools.ConcurrentJunitRunner;
+import org.apache.directory.shared.ldap.name.DN;
+import org.apache.directory.shared.ldap.schema.ObjectClass;
+import org.apache.directory.shared.ldap.schema.SchemaManager;
+import org.apache.directory.shared.ldap.schema.loader.ldif.JarLdifSchemaLoader;
+import org.apache.directory.shared.ldap.schema.manager.impl.DefaultSchemaManager;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+/**
+ * Unit tests class for Subtree Specification parser (wrapper).
+ *
+ * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
+ */
+@RunWith(ConcurrentJunitRunner.class)
+@Concurrent()
+public class SubtreeSpecificationParserTest
+{
    /** The subtree specification checker wrapper (not an ACIItem checker — earlier comment was a copy/paste slip) */
    private static SubtreeSpecificationChecker checker;

    /** The subtree specification parser wrapper */
    private static SubtreeSpecificationParser parser;

    /** A valid empty specification with single white space between brackets */
    private static final String EMPTY_SPEC = "{ }";

    /** A valid specification only with base set */
    private static final String SPEC_WITH_BASE = "{ base \"ou=system\" }";

    /** An invalid specification with missing white space and base set */
    private static final String INVALID_SPEC_WITH_BASE_AND_MISSING_WS = "{ base\"ou=system\"}";

    /** A valid specification with some specific exclusions set */
    private static final String SPEC_WITH_SPECIFICEXCLUSIONS = "{ specificExclusions { chopAfter:\"ef=gh\", chopBefore:\"ab=cd\" } }";

    /** A valid specification with empty specific exclusions set */
    private static final String SPEC_WITH_EMPTY_SPECIFICEXCLUSIONS = "{ specificExclusions { } }";

    /** A valid specification with minimum and maximum set */
    private static final String SPEC_WITH_MINIMUM_AND_MAXIMUM = "{ minimum 1, maximum 2 }";

    /** A valid specification with base and minimum and maximum set */
    private static final String SPEC_WITH_BASE_AND_MINIMUM_AND_MAXIMUM = "{ base \"ou=ORGANIZATION UNIT\", minimum 1, maximum 2 }";

    /**
     * A valid specification with base and specific exclusions and minimum and
     * maximum set
     */
    private static final String SPEC_WITH_BASE_AND_SPECIFICEXCLUSIONS_AND_MINIMUM_AND_MAXIMUM = "{ base \"ou=people\", specificExclusions { chopBefore:\"x=y\""
        + ", chopAfter:\"k=l\", chopBefore:\"y=z\", chopAfter:\"l=m\" }, minimum 7, maximum 77 }";

    /** A valid specification with refinement set */
    private static final String SPEC_WITH_REFINEMENT = "{ base \"ou=system\", specificationFilter and:{ and:{ item:2.5.6.0"
        + ", or:{ item:2.5.6.1, item:person } }, not: item:2.5.6.2 } }";

    /** A valid specification with base and an empty refinement set */
    private static final String SPEC_WITH_BASE_AND_EMPTY_REFINEMENT = "{ base \"ou=system\", specificationFilter and:{ } }";

    /** A valid specification with ALL IN ONE */
    private static final String SPEC_WITH_ALL_IN_ONE = "{ base \"ou=departments\""
        + ", specificExclusions { chopBefore:\"x=y\", chopAfter:\"k=l\", chopBefore:\"y=z\", chopAfter:\"l=m\" }"
        + ", minimum 7, maximum 77"
        + ", specificationFilter and:{ and:{ item:2.5.6.0, or:{ item:2.5.6.1, item:2.5.6.2 } }, not: item:2.5.6.3 } }";

    /** A valid specification with an unusual component order */
    private static final String SPEC_ORDER_OF_COMPONENTS_DOES_NOT_MATTER = "{ base \"ou=system\", minimum 3, specificExclusions { chopBefore:\"x=y\" } }";

    /** An invalid specification with completely unrelated content */
    private static final String INVALID_SILLY_THING = "How much wood would a wood chuck chuck if a wood chuck would chuck wood?";

    /** Holds the multithreaded success value.
        NOTE(review): not referenced by any test visible here — confirm it is used elsewhere in this class. */
    boolean isSuccessMultithreaded = true;

    /** The schema manager */
    private static SchemaManager schemaManager;

    /** Some global object classes, looked up from the schema in init() */
    private static ObjectClass TOP_OC; // 2.5.6.0
    private static ObjectClass ALIAS_OC; // 2.5.6.1
    private static ObjectClass COUNTRY_OC; // 2.5.6.2
    private static ObjectClass LOCALITY_OC; // 2.5.6.3
    private static ObjectClass PERSON_OC; // 2.5.6.6
+
+ /**
+ * Initialization
+ */
+ @BeforeClass
+ public static void init() throws Exception
+ {
+ JarLdifSchemaLoader loader = new JarLdifSchemaLoader();
+ schemaManager = new DefaultSchemaManager( loader );
+
+ schemaManager.loadAllEnabled();
+
+ checker = new SubtreeSpecificationChecker( schemaManager );
+ parser = new SubtreeSpecificationParser( schemaManager );
+
+ TOP_OC = schemaManager.lookupObjectClassRegistry( "top" );
+ ALIAS_OC = schemaManager.lookupObjectClassRegistry( "alias" );
+ COUNTRY_OC = schemaManager.lookupObjectClassRegistry( "country" );
+ LOCALITY_OC = schemaManager.lookupObjectClassRegistry( "locality" );
+ PERSON_OC = schemaManager.lookupObjectClassRegistry( "person" );
+ }
+
+
+ /**
+ * Tests the parser with a valid empty specification.
+ */
+ @Test
+ public void testEmptySpec() throws Exception
+ {
+ SubtreeSpecification ss = parser.parse( EMPTY_SPEC );
+ assertNotNull( ss );
+
+ // try a second time
+ ss = parser.parse( EMPTY_SPEC );
+ assertNotNull( ss );
+
+ // try a third time
+ ss = parser.parse( EMPTY_SPEC );
+ assertNotNull( ss );
+ }
+
+
+ /**
+ * Tests the parser with a valid specification with base set.
+ */
+ @Test
+ public void testSpecWithBase() throws Exception
+ {
+ SubtreeSpecification ss = parser.parse( SPEC_WITH_BASE );
+ assertNotNull( ss );
+
+ assertEquals( "ou=system", ss.getBase().toString() );
+ }
+
+
+ /**
+ * Tests the parser with an invalid specification with missing white spaces
+ * and base set.
+ */
+ @Test
+ public void testInvalidSpecWithBaseAndMissingWS() throws Exception
+ {
+ try
+ {
+ parser.parse( INVALID_SPEC_WITH_BASE_AND_MISSING_WS );
+ fail( "testInvalidSpecWithBaseAndMissingWS() should never come here..." );
+ }
+ catch ( ParseException e )
+ {
+ assertNotNull( e );
+ }
+ }
+
+
+ /**
+ * Tests the parser with a valid specification with some specific exclusions
+ * set.
+ */
+ @Test
+ public void testSpecWithSpecificExclusions() throws Exception
+ {
+ SubtreeSpecification ss = parser.parse( SPEC_WITH_SPECIFICEXCLUSIONS );
+ assertFalse( ss.getChopBeforeExclusions().isEmpty() );
+ assertFalse( ss.getChopAfterExclusions().isEmpty() );
+ assertTrue( ss.getChopBeforeExclusions().contains( new DN( "ab=cd" ) ) );
+ assertTrue( ss.getChopAfterExclusions().contains( new DN( "ef=gh" ) ) );
+
+ // try a second time
+ ss = parser.parse( SPEC_WITH_SPECIFICEXCLUSIONS );
+ assertFalse( ss.getChopBeforeExclusions().isEmpty() );
+ assertFalse( ss.getChopAfterExclusions().isEmpty() );
+ assertTrue( ss.getChopBeforeExclusions().contains( new DN( "ab=cd" ) ) );
+ assertTrue( ss.getChopAfterExclusions().contains( new DN( "ef=gh" ) ) );
+
+ // try a third time
+ ss = parser.parse( SPEC_WITH_SPECIFICEXCLUSIONS );
+ assertFalse( ss.getChopBeforeExclusions().isEmpty() );
+ assertFalse( ss.getChopAfterExclusions().isEmpty() );
+ assertTrue( ss.getChopBeforeExclusions().contains( new DN( "ab=cd" ) ) );
+ assertTrue( ss.getChopAfterExclusions().contains( new DN( "ef=gh" ) ) );
+ }
+
+
+ /**
+ * Tests the parser with a valid specification with an empty specific
+ * exclusions set.
+ */
+ @Test
+ public void testSpecWithEmptySpecificExclusions() throws Exception
+ {
+ SubtreeSpecification ss = parser.parse( SPEC_WITH_EMPTY_SPECIFICEXCLUSIONS );
+ assertNotNull( ss );
+
+ assertTrue( ss.getChopBeforeExclusions().isEmpty() );
+ }
+
+
+ /**
+ * Tests the parser with a valid specification with minimum and maximum set.
+ */
+ @Test
+ public void testSpecWithMinimumAndMaximum() throws Exception
+ {
+ SubtreeSpecification ss = parser.parse( SPEC_WITH_MINIMUM_AND_MAXIMUM );
+ assertEquals( 1, ss.getMinBaseDistance() );
+ assertEquals( 2, ss.getMaxBaseDistance() );
+
+ // try a second time
+ ss = parser.parse( SPEC_WITH_MINIMUM_AND_MAXIMUM );
+ assertEquals( 1, ss.getMinBaseDistance() );
+ assertEquals( 2, ss.getMaxBaseDistance() );
+
+ // try a third time
+ ss = parser.parse( SPEC_WITH_MINIMUM_AND_MAXIMUM );
+ assertEquals( 1, ss.getMinBaseDistance() );
+ assertEquals( 2, ss.getMaxBaseDistance() );
+ }
+
+
+ /**
+ * Tests the parser with a valid specification with base and minimum and
+ * maximum set.
+ */
+ @Test
+ public void testWithBaseAndMinimumAndMaximum() throws Exception
+ {
+ SubtreeSpecification ss = parser.parse( SPEC_WITH_BASE_AND_MINIMUM_AND_MAXIMUM );
+
+ assertEquals( new DN( "ou=ORGANIZATION UNIT" ).getName(), ss.getBase().getName() );
+ assertEquals( 1, ss.getMinBaseDistance() );
+ assertEquals( 2, ss.getMaxBaseDistance() );
+ }
+
+
    /**
     * Tests the parser with a valid specification with base and specific
     * exclusions and minimum and maximum set.
     */
    @Test
    public void testSpecWithBaseAndSpecificExclusionsAndMinimumAndMaximum() throws Exception
    {
        SubtreeSpecification ss = parser.parse( SPEC_WITH_BASE_AND_SPECIFICEXCLUSIONS_AND_MINIMUM_AND_MAXIMUM );
        assertNotNull( ss );

        assertEquals( "ou=people", ss.getBase().toString() );
        // The exclusion DNs must be normalized before lookup: the parser
        // appears to store them in normalized form (schema-aware parse).
        assertTrue( ss.getChopBeforeExclusions().contains( new DN( "x=y" ).normalize( schemaManager.getNormalizerMapping() ) ) );
        assertTrue( ss.getChopBeforeExclusions().contains( new DN( "y=z" ).normalize( schemaManager.getNormalizerMapping() ) ) );
        assertTrue( ss.getChopAfterExclusions().contains( new DN( "k=l" ).normalize( schemaManager.getNormalizerMapping() ) ) );
        assertTrue( ss.getChopAfterExclusions().contains( new DN( "l=m" ).normalize( schemaManager.getNormalizerMapping() ) ) );
        assertEquals( 7, ss.getMinBaseDistance() );
        assertEquals( 77, ss.getMaxBaseDistance() );
    }
+
+
+ /**
+ * Tests the parser with a valid specification with refinement set.
+ */
+ @Test
+ public void testSpecWithRefinement() throws Exception
+ {
+ SubtreeSpecification ss = parser.parse( SPEC_WITH_REFINEMENT );
+
+ // The items
+ Refinement topItem = new ItemRefinement( TOP_OC );
+ Refinement aliasItem = new ItemRefinement( ALIAS_OC );
+ Refinement personItem = new ItemRefinement( PERSON_OC );
+ Refinement countryItem = new ItemRefinement( COUNTRY_OC );
+
+ // The inner OR refinement or:{item:2.5.6.1, item:person}
+ List<Refinement> orList = new ArrayList<Refinement>();
+ orList.add( aliasItem );
+ orList.add( personItem );
+
+ Refinement orRefinement = new OrRefinement( orList );
+
+ // The inner AND refinement and:{ item:2.5.6.0, or:... }
+ List<Refinement> innerAndList = new ArrayList<Refinement>();
+ innerAndList.add( topItem );
+ innerAndList.add( orRefinement );
+
+ Refinement innerAndRefinement = new AndRefinement( innerAndList );
+
+ // The NOT refinement not:item:2.5.6.2
+ Refinement notRefinement = new NotRefinement( countryItem );
+
+ // The outer AND refinement and:{and:..., not:...}
+ List<Refinement> outerAndList = new ArrayList<Refinement>();
+ outerAndList.add( innerAndRefinement );
+ outerAndList.add( notRefinement );
+
+ Refinement outerAndRefinement = new AndRefinement( outerAndList );
+
+ StringBuilder buffer = new StringBuilder();
+ ss.getRefinement().printRefinementToBuffer( buffer );
+
+ // NOTE(review): the disabled assertion below compared a String with a
+ // StringBuilder, which can never be equal. outerAndRefinement therefore
+ // only documents the expected structure; the check is done against the
+ // literal string instead.
+ //assertEquals( outerAndRefinement.toString(), buffer );
+ assertEquals( "and: { and: { item: 2.5.6.0, or: { item: 2.5.6.1, item: person } }, not: item: 2.5.6.2 }", buffer.toString() );
+ }
+
+
+ /**
+ * Tests the parser with a valid specification with base and empty
+ * refinement set.
+ */
+ @Test
+ public void testSpecWithBaseAndEmptyRefinement() throws Exception
+ {
+ SubtreeSpecification ss = parser.parse( SPEC_WITH_BASE_AND_EMPTY_REFINEMENT );
+
+ // fail with an assertion message instead of a NullPointerException if
+ // parsing returned null, consistent with the sibling tests
+ assertNotNull( ss );
+ assertEquals( "ou=system", ss.getBase().toString() );
+ }
+
+
+ /**
+ * Tests the parser with a valid specification with all components set.
+ */
+ @Test
+ public void testSpecWithAllInOne() throws Exception
+ {
+ // smoke test: only verifies that a spec combining every component
+ // parses; individual component values are covered by the tests above
+ SubtreeSpecification ss = parser.parse( SPEC_WITH_ALL_IN_ONE );
+ assertNotNull( ss );
+ }
+
+
+ /**
+ * Tests the parser with a valid specification with unordinary component
+ * order.
+ */
+ @Test
+ public void testSpecOrderOfComponentsDoesNotMatter() throws Exception
+ {
+ // the grammar must accept subtree specification components in any
+ // order, not just the canonical one
+ SubtreeSpecification ss = parser.parse( SPEC_ORDER_OF_COMPONENTS_DOES_NOT_MATTER );
+ assertNotNull( ss );
+ }
+
+
+ /**
+ * Tests the parser with an invalid specification with silly things in.
+ * Parsing must be rejected with a ParseException.
+ */
+ @Test
+ public void testInvalidSillyThing() throws Exception
+ {
+ try
+ {
+ parser.parse( INVALID_SILLY_THING );
+ fail( "testInvalidSillyThing() should never come here..." );
+ }
+ catch ( ParseException e )
+ {
+ // expected path: invalid input was rejected
+ assertNotNull( e );
+ }
+ }
+
+
+ /**
+ * Test reusability, especially that parser state is reset between two
+ * consecutive parse() calls on the same parser instance.
+ * NOTE(review): the method name carries a typo ("Reusabiltiy"); kept
+ * as-is to avoid touching the public test surface.
+ */
+ @Test
+ public void testReusabiltiy() throws Exception
+ {
+ // NOTE(review): unlike the other tests, the expected DNs here are not
+ // normalized before comparison — confirm these simple DNs compare equal
+ // without normalization
+ DN firstDN = new DN("k=l");
+ String firstExclusion = "{ specificExclusions { chopAfter:\"k=l\" } }";
+ SubtreeSpecification firstSpec = parser.parse( firstExclusion );
+ assertEquals( 1, firstSpec.getChopAfterExclusions().size() );
+ assertEquals( firstDN, firstSpec.getChopAfterExclusions().iterator().next() );
+
+ // a second parse on the same instance must not see leftovers from the
+ // first one
+ DN secondDN = new DN("x=y");
+ String secondExclusion = "{ specificExclusions { chopAfter:\"x=y\" } }";
+ SubtreeSpecification secondSpec = parser.parse( secondExclusion );
+ assertEquals( 1, secondSpec.getChopAfterExclusions().size() );
+ assertEquals( secondDN, secondSpec.getChopAfterExclusions().iterator().next() );
+
+ }
+
+
+ /**
+ * Tests the multithreaded use of a single parser: 40 workers parse
+ * concurrently and each reports success through isSuccessMultithreaded.
+ */
+ @Test
+ public void testMultiThreaded() throws Exception
+ {
+ // start up and track all threads (40 threads)
+ List<Thread> threads = new ArrayList<Thread>();
+ for ( int ii = 0; ii < 10; ii++ )
+ {
+ Thread t0 = new Thread( new ParseSpecification( EMPTY_SPEC ) );
+ Thread t1 = new Thread( new ParseSpecification( SPEC_WITH_SPECIFICEXCLUSIONS ) );
+ Thread t2 = new Thread( new ParseSpecification( SPEC_WITH_MINIMUM_AND_MAXIMUM ) );
+ Thread t3 = new Thread( new ParseSpecification( SPEC_WITH_ALL_IN_ONE ) );
+ threads.add( t0 );
+ threads.add( t1 );
+ threads.add( t2 );
+ threads.add( t3 );
+ t0.start();
+ t1.start();
+ t2.start();
+ t3.start();
+ }
+
+ // wait until all threads have died: join() blocks instead of the
+ // previous busy-wait loop that spun on isAlive() at full CPU, and it
+ // also establishes a happens-before edge so this thread reliably sees
+ // the workers' writes to isSuccessMultithreaded
+ for ( Thread t : threads )
+ {
+ t.join();
+ }
+
+ // check that no one thread failed to parse and generate a SS object
+ assertTrue( isSuccessMultithreaded );
+ }
+
+ /**
+ * Used to test multithreaded use of a single parser. Each instance
+ * parses one specification string and reports failure through the
+ * shared isSuccessMultithreaded flag.
+ */
+ class ParseSpecification implements Runnable
+ {
+ // the subtree specification string this worker parses
+ private final String specStr;
+
+ // the parse result; stays null when parsing threw
+ SubtreeSpecification result;
+
+
+ public ParseSpecification(String specStr)
+ {
+ this.specStr = specStr;
+ }
+
+
+ public void run()
+ {
+ try
+ {
+ result = parser.parse( specStr );
+ }
+ catch ( ParseException e )
+ {
+ // intentionally swallowed: the failure is reported via the
+ // shared flag below; the stack trace only aids debugging
+ e.printStackTrace();
+ }
+
+ // NOTE(review): this read-modify-write of the shared flag is not
+ // atomic and the field is presumably not volatile — a lost update
+ // could mask a failure; confirm or protect with synchronization
+ isSuccessMultithreaded = isSuccessMultithreaded && ( result != null );
+ }
+ }
+}
Modified: directory/shared/trunk/ldap/pom.xml
URL: http://svn.apache.org/viewvc/directory/shared/trunk/ldap/pom.xml?rev=964361&r1=964360&r2=964361&view=diff
==============================================================================
--- directory/shared/trunk/ldap/pom.xml (original)
+++ directory/shared/trunk/ldap/pom.xml Thu Jul 15 10:04:06 2010
@@ -87,7 +87,7 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antlr-plugin</artifactId>
<configuration>
- <grammars>schema-extension.g schema-qdstring.g schema-value.g schema.g TriggerSpecification.g distinguishedName.g</grammars>
+ <grammars>schema-extension.g schema-qdstring.g schema-value.g schema.g TriggerSpecification.g distinguishedName.g ACIItem.g ACIItemChecker.g subtree-specification.g SubtreeSpecificationChecker.g</grammars>
</configuration>
<executions>
<execution>
Added: directory/shared/trunk/ldap/src/main/antlr/ACIItem.g
URL: http://svn.apache.org/viewvc/directory/shared/trunk/ldap/src/main/antlr/ACIItem.g?rev=964361&view=auto
==============================================================================
--- directory/shared/trunk/ldap/src/main/antlr/ACIItem.g (added)
+++ directory/shared/trunk/ldap/src/main/antlr/ACIItem.g Thu Jul 15 10:04:06 2010
@@ -0,0 +1,1547 @@
+header
+{
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+
+package org.apache.directory.shared.ldap.aci;
+
+
+import java.util.List;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.Enumeration;
+
+import org.apache.directory.shared.ldap.filter.BranchNode;
+import org.apache.directory.shared.ldap.filter.AndNode;
+import org.apache.directory.shared.ldap.filter.OrNode;
+import org.apache.directory.shared.ldap.filter.NotNode;
+import org.apache.directory.shared.ldap.filter.ExprNode;
+import org.apache.directory.shared.ldap.filter.LeafNode;
+import org.apache.directory.shared.ldap.filter.EqualityNode;
+import org.apache.directory.shared.ldap.filter.FilterParser;
+import org.apache.directory.shared.ldap.name.NameComponentNormalizer;
+import org.apache.directory.shared.ldap.subtree.SubtreeSpecification;
+import org.apache.directory.shared.ldap.subtree.SubtreeSpecificationModifier;
+import org.apache.directory.shared.ldap.util.ComponentsMonitor;
+import org.apache.directory.shared.ldap.util.MandatoryAndOptionalComponentsMonitor;
+import org.apache.directory.shared.ldap.util.MandatoryComponentsMonitor;
+import org.apache.directory.shared.ldap.util.NamespaceTools;
+import org.apache.directory.shared.ldap.util.NoDuplicateKeysMap;
+import org.apache.directory.shared.ldap.util.OptionalComponentsMonitor;
+import org.apache.directory.shared.ldap.name.DN;
+import org.apache.directory.shared.ldap.name.RDN;
+import org.apache.directory.shared.ldap.schema.AttributeType;
+import org.apache.directory.shared.ldap.schema.SchemaManager;
+import org.apache.directory.shared.ldap.constants.SchemaConstants;
+import org.apache.directory.shared.ldap.constants.AuthenticationLevel;
+import org.apache.directory.shared.ldap.entry.StringValue;
+import org.apache.directory.shared.ldap.entry.EntryAttribute;
+import org.apache.directory.shared.ldap.entry.DefaultEntryAttribute;
+import org.apache.directory.shared.ldap.exception.LdapException;
+import org.apache.directory.shared.ldap.aci.protectedItem.AllAttributeValuesItem;
+import org.apache.directory.shared.ldap.aci.protectedItem.AttributeTypeItem;
+import org.apache.directory.shared.ldap.aci.protectedItem.AttributeValueItem;
+import org.apache.directory.shared.ldap.aci.protectedItem.SelfValueItem;
+import org.apache.directory.shared.ldap.aci.protectedItem.ClassesItem;
+import org.apache.directory.shared.ldap.aci.protectedItem.EntryItem;
+import org.apache.directory.shared.ldap.aci.protectedItem.RangeOfValuesItem;
+import org.apache.directory.shared.ldap.aci.protectedItem.MaxImmSubItem;
+import org.apache.directory.shared.ldap.aci.protectedItem.MaxValueCountElem;
+import org.apache.directory.shared.ldap.aci.protectedItem.MaxValueCountItem;
+import org.apache.directory.shared.ldap.aci.protectedItem.RestrictedByElem;
+import org.apache.directory.shared.ldap.aci.protectedItem.RestrictedByItem;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+}
+
+
+// ----------------------------------------------------------------------------
+// parser class definition
+// ----------------------------------------------------------------------------
+
+/**
+ * The antlr generated ACIItem parser.
+ *
+ * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
+ */
+class AntlrACIItemParser extends Parser;
+
+
+// ----------------------------------------------------------------------------
+// parser options
+// ----------------------------------------------------------------------------
+
+options
+{
+ k = 1; // ;-)
+ defaultErrorHandler = false;
+}
+
+
+// ----------------------------------------------------------------------------
+// imaginary tokens
+// ----------------------------------------------------------------------------
+
+tokens
+{
+ ATTRIBUTE_VALUE_CANDIDATE;
+ RANGE_OF_VALUES_CANDIDATE;
+}
+
+
+// ----------------------------------------------------------------------------
+// parser initialization
+// ----------------------------------------------------------------------------
+
+{
+ private static final Logger log = LoggerFactory.getLogger( AntlrACIItemParser.class );
+
+ NameComponentNormalizer normalizer;
+
+ // nonshared global data needed to avoid extensive pass/return stuff
+ // these are only used by three first order components
+ private String identificationTag;
+ private AuthenticationLevel authenticationLevel;
+ private Integer aciPrecedence = null;
+
+ private boolean isItemFirstACIItem;
+
+ // shared global data needed to avoid extensive pass/return stuff
+ // NOTE(review): these mutable fields make a parser instance
+ // non-reentrant — presumably callers serialize access; confirm
+ private Set<ProtectedItem> protectedItems;
+ private Map<String, ProtectedItem> protectedItemsMap;
+ private Set<UserClass> userClasses;
+ private Map<String, UserClass> userClassesMap;
+ private Set<ItemPermission> itemPermissions;
+ private Integer precedence = null;
+ private Set<GrantAndDenial> grantsAndDenials;
+ private Set<UserPermission> userPermissions;
+
+ /** The SchemaManager */
+ private SchemaManager schemaManager;
+
+ private Set<DN> chopBeforeExclusions;
+ private Set<DN> chopAfterExclusions;
+ private SubtreeSpecificationModifier ssModifier = null;
+
+ private ComponentsMonitor mainACIItemComponentsMonitor;
+ private ComponentsMonitor itemPermissionComponentsMonitor;
+ private ComponentsMonitor userPermissionComponentsMonitor;
+ private ComponentsMonitor subtreeSpecificationComponentsMonitor;
+
+
+ /**
+ * Initializes this parser with the SchemaManager used for attribute
+ * type lookups. This method MUST be called for each instance while we
+ * cannot do constructor overloading for this class.
+ *
+ * @param schemaManager the SchemaManager used for registry lookups
+ */
+ public void init( SchemaManager schemaManager )
+ {
+ this.schemaManager = schemaManager;
+ }
+
+ /**
+ * Sets the NameComponentNormalizer for this parser's dnParser.
+ */
+ public void setNormalizer(NameComponentNormalizer normalizer)
+ {
+ this.normalizer = normalizer;
+ }
+
+ /**
+ * Converts an INTEGER token to an int, rethrowing a malformed value
+ * as a RecognitionException.
+ */
+ private int token2Integer( Token token ) throws RecognitionException
+ {
+ int i = 0;
+
+ try
+ {
+ i = Integer.parseInt( token.getText());
+ }
+ catch ( NumberFormatException e )
+ {
+ // the NumberFormatException cause is not chained — this
+ // RecognitionException constructor only takes a message
+ throw new RecognitionException( "Value of INTEGER token " +
+ token.getText() +
+ " cannot be converted to an Integer" );
+ }
+
+ return i;
+ }
+}
+
+
+// ----------------------------------------------------------------------------
+// parser productions
+// ----------------------------------------------------------------------------
+
+wrapperEntryPoint returns [ ACIItem aciItem ]
+{
+ log.debug( "entered wrapperEntryPoint()" );
+ aciItem = null;
+}
+ :
+ ( SP )* aciItem = theACIItem ( SP )* EOF
+ ;
+
+theACIItem returns [ ACIItem aciItem ]
+{
+ log.debug( "entered theACIItem()" );
+ aciItem = null;
+ mainACIItemComponentsMonitor = new MandatoryComponentsMonitor(
+ new String [] { "identificationTag", "precedence", "authenticationLevel", "itemOrUserFirst" } );
+}
+ :
+ OPEN_CURLY
+ ( SP )* mainACIItemComponent ( SP )*
+ ( SEP ( SP )* mainACIItemComponent ( SP )* )*
+ CLOSE_CURLY
+ {
+ if ( !mainACIItemComponentsMonitor.finalStateValid() )
+ {
+ throw new RecognitionException( "Missing mandatory ACIItem components: "
+ + mainACIItemComponentsMonitor.getRemainingComponents() );
+ }
+
+ if ( isItemFirstACIItem )
+ {
+ aciItem = new ItemFirstACIItem(
+ identificationTag,
+ aciPrecedence,
+ authenticationLevel,
+ protectedItems,
+ itemPermissions );
+ }
+ else
+ {
+ aciItem = new UserFirstACIItem(
+ identificationTag,
+ aciPrecedence,
+ authenticationLevel,
+ userClasses,
+ userPermissions );
+ }
+ }
+ ;
+
+mainACIItemComponent
+{
+ log.debug( "entered mainACIItemComponent()" );
+}
+ :
+ aci_identificationTag
+ {
+ mainACIItemComponentsMonitor.useComponent( "identificationTag" );
+ }
+ | aci_precedence
+ {
+ mainACIItemComponentsMonitor.useComponent( "precedence" );
+ }
+ | aci_authenticationLevel
+ {
+ mainACIItemComponentsMonitor.useComponent( "authenticationLevel" );
+ }
+ | aci_itemOrUserFirst
+ {
+ mainACIItemComponentsMonitor.useComponent( "itemOrUserFirst" );
+ }
+ ;
+ exception
+ catch [IllegalArgumentException e]
+ {
+ throw new RecognitionException( e.getMessage() );
+ }
+
+aci_identificationTag
+{
+ log.debug( "entered aci_identificationTag()" );
+}
+ :
+ ID_identificationTag ( SP )+ token:SAFEUTF8STRING
+ {
+ identificationTag = token.getText();
+ }
+ ;
+
+aci_precedence
+{
+ log.debug( "entered aci_precedence()" );
+}
+ :
+ precedence
+ {
+ aciPrecedence = Integer.valueOf( precedence );
+ precedence = null;
+ }
+ ;
+
+precedence
+{
+ log.debug( "entered precedence()" );
+}
+ :
+ ID_precedence ( SP )+ token:INTEGER
+ {
+ precedence = Integer.valueOf( token2Integer( token ) );
+
+ if ( ( precedence < 0 ) || ( precedence > 255 ) )
+ {
+ throw new RecognitionException( "Expecting INTEGER token having an Integer value between 0 and 255, found " + precedence );
+ }
+ }
+ ;
+
+aci_authenticationLevel
+{
+ log.debug( "entered aci_authenticationLevel()" );
+}
+ :
+ ID_authenticationLevel ( SP )+ authenticationLevel
+ ;
+
+authenticationLevel
+{
+ log.debug( "entered authenticationLevel()" );
+}
+ :
+ ID_none
+ {
+ authenticationLevel = AuthenticationLevel.NONE;
+ }
+ |
+ ID_simple
+ {
+ authenticationLevel = AuthenticationLevel.SIMPLE;
+ }
+ |
+ ID_strong
+ {
+ authenticationLevel = AuthenticationLevel.STRONG;
+ }
+ ;
+
+aci_itemOrUserFirst
+{
+ log.debug( "entered aci_itemOrUserFirst()" );
+}
+ :
+ ID_itemOrUserFirst ( SP )+ itemOrUserFirst
+ ;
+
+itemOrUserFirst
+{
+ log.debug( "entered itemOrUserFirst()" );
+}
+ :
+ itemFirst | userFirst
+ ;
+
+itemFirst
+{
+ log.debug( "entered itemFirst()" );
+}
+ :
+ ID_itemFirst ( SP )* COLON ( SP )*
+ OPEN_CURLY ( SP )*
+ protectedItems ( SP )* SEP ( SP )* itemPermissions
+ ( SP )* CLOSE_CURLY
+ {
+ isItemFirstACIItem = true;
+ }
+ ;
+
+userFirst
+{
+ log.debug( "entered userFirst()" );
+}
+ :
+ ID_userFirst ( SP )* COLON ( SP )*
+ OPEN_CURLY ( SP )*
+ userClasses ( SP )* SEP ( SP )* userPermissions
+ ( SP )* CLOSE_CURLY
+ {
+ isItemFirstACIItem = false;
+ }
+ ;
+
+protectedItems
+{
+ log.debug( "entered protectedItems()" );
+ protectedItemsMap = new NoDuplicateKeysMap();
+}
+ :
+ ID_protectedItems ( SP )*
+ OPEN_CURLY ( SP )*
+ (
+ protectedItem ( SP )*
+ ( SEP ( SP )* protectedItem ( SP )* )*
+ )?
+ CLOSE_CURLY
+ {
+ protectedItems = new HashSet<ProtectedItem>( protectedItemsMap.values() );
+ }
+ ;
+ exception
+ catch [IllegalArgumentException e]
+ {
+ throw new RecognitionException( "Protected Items cannot be duplicated. " + e.getMessage() );
+ }
+
+protectedItem
+{
+ log.debug( "entered protectedItem()" );
+}
+ :
+ entry
+ | allUserAttributeTypes
+ | attributeType
+ | allAttributeValues
+ | allUserAttributeTypesAndValues
+ | attributeValue
+ | selfValue
+ | rangeOfValues
+ | maxValueCount
+ | maxImmSub
+ | restrictedBy
+ | classes
+ ;
+
+entry
+{
+ log.debug( "entered entry()" );
+}
+ :
+ ID_entry
+ {
+ protectedItemsMap.put( "entry", ProtectedItem.ENTRY );
+ }
+ ;
+
+allUserAttributeTypes
+{
+ log.debug( "entered allUserAttributeTypes()" );
+}
+ :
+ ID_allUserAttributeTypes
+ {
+ protectedItemsMap.put( "allUserAttributeTypes", ProtectedItem.ALL_USER_ATTRIBUTE_TYPES );
+ }
+ ;
+
+attributeType
+{
+ log.debug( "entered attributeType()" );
+ Set<AttributeType> attributeTypeSet = null;
+}
+ :
+ ID_attributeType ( SP )+ attributeTypeSet=attributeTypeSet
+ {
+ protectedItemsMap.put( "attributeType", new AttributeTypeItem(attributeTypeSet ) );
+ }
+ ;
+
+allAttributeValues
+{
+ log.debug( "entered allAttributeValues()" );
+ Set<AttributeType> attributeTypeSet = null;
+}
+ :
+ ID_allAttributeValues ( SP )+ attributeTypeSet=attributeTypeSet
+ {
+ protectedItemsMap.put( "allAttributeValues", new AllAttributeValuesItem( attributeTypeSet ) );
+ }
+ ;
+
+allUserAttributeTypesAndValues
+{
+ log.debug( "entered allUserAttributeTypesAndValues()" );
+}
+ :
+ ID_allUserAttributeTypesAndValues
+ {
+ protectedItemsMap.put( "allUserAttributeTypesAndValues", ProtectedItem.ALL_USER_ATTRIBUTE_TYPES_AND_VALUES );
+ }
+ ;
+
+attributeValue
+{
+ log.debug( "entered attributeValue()" );
+ String attributeTypeAndValue = null;
+ String attributeType = null;
+ String attributeValue = null;
+ Set<EntryAttribute> attributeSet = new HashSet<EntryAttribute>();
+}
+ :
+ token:ATTRIBUTE_VALUE_CANDIDATE // ate the identifier for subordinate dn parser workaround
+ {
+ // A Dn can be considered as a set of attributeTypeAndValues
+ // So, parse the set as a Dn and extract each attributeTypeAndValue
+ DN attributeTypeAndValueSetAsDn = new DN( token.getText() );
+
+ if ( schemaManager != null )
+ {
+ attributeTypeAndValueSetAsDn.normalize( schemaManager.getNormalizerMapping() );
+ }
+
+ for ( RDN rdn :attributeTypeAndValueSetAsDn.getRdns() )
+ {
+ attributeTypeAndValue = rdn.getNormName();
+ attributeType = NamespaceTools.getRdnAttribute( attributeTypeAndValue );
+ attributeValue = NamespaceTools.getRdnValue( attributeTypeAndValue );
+
+ attributeSet.add( new DefaultEntryAttribute( attributeType, attributeValue ) );
+ log.debug( "An attributeTypeAndValue from the set: " + attributeType + "=" + attributeValue);
+ }
+
+ protectedItemsMap.put( "attributeValue", new AttributeValueItem( attributeSet ) );
+ }
+ ;
+ exception
+ catch [Exception e]
+ {
+ throw new RecognitionException( "dnParser failed for " + token.getText() + " , " + e.getMessage() );
+ }
+
+// Parses the selfValue protected item and records it under its proper
+// component name.
+selfValue
+{
+ log.debug( "entered selfValue()" );
+ Set<AttributeType> attributeTypeSet = null;
+}
+ :
+ ID_selfValue ( SP )+ attributeTypeSet=attributeTypeSet
+ {
+ // key fixed from the misspelled "sefValue": the NoDuplicateKeysMap key
+ // doubles as the component name in duplicate-item error reporting, so
+ // it must match the grammar identifier
+ protectedItemsMap.put( "selfValue", new SelfValueItem( attributeTypeSet ) );
+ }
+ ;
+
+rangeOfValues
+{
+ log.debug( "entered rangeOfValues()" );
+}
+ :
+ token:RANGE_OF_VALUES_CANDIDATE
+ {
+ protectedItemsMap.put( "rangeOfValues",
+ new RangeOfValuesItem(
+ FilterParser.parse( token.getText() ) ) );
+ log.debug( "filterParser parsed " + token.getText() );
+ }
+ ;
+ exception
+ catch [Exception e]
+ {
+ throw new RecognitionException( "filterParser failed. " + e.getMessage() );
+ }
+
+maxValueCount
+{
+ log.debug( "entered maxValueCount()" );
+ MaxValueCountElem maxValueCount = null;
+ Set<MaxValueCountElem> maxValueCountSet = new HashSet<MaxValueCountElem>();
+}
+ :
+ ID_maxValueCount ( SP )+
+ OPEN_CURLY ( SP )*
+ maxValueCount=aMaxValueCount ( SP )*
+ {
+ maxValueCountSet.add( maxValueCount );
+ }
+ ( SEP ( SP )* maxValueCount=aMaxValueCount ( SP )*
+ {
+ maxValueCountSet.add( maxValueCount );
+ }
+ )*
+ CLOSE_CURLY
+ {
+ protectedItemsMap.put( "maxValueCount", new MaxValueCountItem( maxValueCountSet ) );
+ }
+ ;
+
+// Parses one { type ..., maxCount ... } element (either component order is
+// accepted) and resolves the type oid against the schema registry.
+aMaxValueCount returns [ MaxValueCountElem maxValueCount ]
+{
+ log.debug( "entered aMaxValueCount()" );
+ maxValueCount = null;
+ String oid = null;
+ Token token = null;
+ AttributeType attributeType = null;
+}
+ :
+ OPEN_CURLY ( SP )*
+ (
+ ID_type ( SP )+ oid=oid ( SP )* SEP ( SP )*
+ ID_maxCount ( SP )+ token1:INTEGER
+ { token = token1; }
+ | // relaxing
+ ID_maxCount ( SP )+ token2:INTEGER ( SP )* SEP ( SP )*
+ ID_type ( SP )+ oid=oid
+ { token = token2; }
+ )
+ ( SP )* CLOSE_CURLY
+ {
+ try
+ {
+ attributeType = schemaManager.lookupAttributeTypeRegistry( oid );
+ maxValueCount = new MaxValueCountElem( attributeType, token2Integer( token ) );
+ }
+ catch ( LdapException le )
+ {
+ // The oid does not exist
+ // TODO : deal with such an exception
+ // NOTE(review): the exception is swallowed and maxValueCount is
+ // returned as null; the caller adds it to a Set unchecked —
+ // confirm a null element is tolerated downstream
+ }
+ }
+ ;
+
+maxImmSub
+{
+ log.debug( "entered maxImmSub()" );
+}
+ :
+ ID_maxImmSub ( SP )+ token:INTEGER
+ {
+
+ protectedItemsMap.put( "maxImmSub",
+ new MaxImmSubItem(
+ token2Integer( token ) ) );
+ }
+ ;
+
+restrictedBy
+{
+ log.debug( "entered restrictedBy()" );
+ RestrictedByElem restrictedValue = null;
+ Set<RestrictedByElem> restrictedBy = new HashSet<RestrictedByElem>();
+}
+ :
+ ID_restrictedBy ( SP )+
+ OPEN_CURLY ( SP )*
+ restrictedValue=restrictedValue ( SP )*
+ {
+ restrictedBy.add( restrictedValue );
+ }
+ ( SEP ( SP )* restrictedValue=restrictedValue ( SP )*
+ {
+ restrictedBy.add( restrictedValue );
+ }
+ )*
+ CLOSE_CURLY
+ {
+ protectedItemsMap.put( "restrictedBy", new RestrictedByItem( restrictedBy ) );
+ }
+ ;
+
+// Parses one { type ..., valuesIn ... } element (either component order is
+// accepted) and resolves both oids against the schema registry.
+restrictedValue returns [ RestrictedByElem restrictedValue ]
+{
+ log.debug( "entered restrictedValue()" );
+ String typeOid = null;
+ String valuesInOid = null;
+ restrictedValue = null;
+ AttributeType attributeType = null;
+ AttributeType valueInAttributeType = null;
+}
+ :
+ OPEN_CURLY ( SP )*
+ (
+ ID_type ( SP )+ typeOid=oid ( SP )* SEP ( SP )*
+ ID_valuesIn ( SP )+ valuesInOid=oid
+ | // relaxing
+ ID_valuesIn ( SP )+ valuesInOid=oid ( SP )* SEP ( SP )*
+ ID_type ( SP )+ typeOid=oid
+ )
+ ( SP )* CLOSE_CURLY
+ {
+ try
+ {
+ attributeType = schemaManager.lookupAttributeTypeRegistry( typeOid );
+ valueInAttributeType = schemaManager.lookupAttributeTypeRegistry( valuesInOid );
+ restrictedValue = new RestrictedByElem( attributeType, valueInAttributeType );
+ }
+ catch ( LdapException le )
+ {
+ // The oid does not exist
+ // TODO : deal with such an exception
+ // NOTE(review): same silent-swallow pattern as aMaxValueCount —
+ // restrictedValue is returned null on an unknown oid; confirm the
+ // caller tolerates a null element in its Set
+ }
+ }
+ ;
+
+attributeTypeSet returns [ Set<AttributeType> attributeTypeSet ]
+{
+ log.debug( "entered attributeTypeSet()" );
+ String oid = null;
+ attributeTypeSet = new HashSet<AttributeType>();
+ AttributeType attributeType = null;
+}
+ :
+ OPEN_CURLY ( SP )*
+ oid=oid ( SP )*
+ {
+ try
+ {
+ attributeType = schemaManager.lookupAttributeTypeRegistry( oid );
+ attributeTypeSet.add( attributeType );
+ }
+ catch ( LdapException le )
+ {
+ // The oid does not exist
+ // TODO : deal with such an exception
+ }
+ }
+ ( SEP ( SP )* oid=oid ( SP )*
+ {
+ try
+ {
+ attributeType = schemaManager.lookupAttributeTypeRegistry( oid );
+ attributeTypeSet.add( attributeType );
+ }
+ catch ( LdapException le )
+ {
+ // The oid does not exist
+ // TODO : deal with such an exception
+ }
+ }
+ )*
+ CLOSE_CURLY
+ ;
+
+classes
+{
+ log.debug( "entered classes()" );
+ ExprNode classes = null;
+}
+ :
+ ID_classes ( SP )+ classes=refinement
+ {
+ protectedItemsMap.put( "classes", new ClassesItem( classes ) );
+ }
+ ;
+
+itemPermissions
+{
+ log.debug( "entered itemPermissions()" );
+ itemPermissions = new HashSet<ItemPermission>();
+ ItemPermission itemPermission = null;
+}
+ :
+ ID_itemPermissions ( SP )+
+ OPEN_CURLY ( SP )*
+ ( itemPermission=itemPermission ( SP )*
+ {
+ itemPermissions.add( itemPermission );
+ }
+ ( SEP ( SP )* itemPermission=itemPermission ( SP )*
+ {
+ itemPermissions.add( itemPermission );
+ }
+ )*
+ )?
+ CLOSE_CURLY
+ ;
+
+itemPermission returns [ ItemPermission itemPermission ]
+{
+ log.debug( "entered itemPermission()" );
+ itemPermission = null;
+ itemPermissionComponentsMonitor = new MandatoryAndOptionalComponentsMonitor(
+ new String [] { "userClasses", "grantsAndDenials" }, new String [] { "precedence" } );
+}
+ :
+ OPEN_CURLY ( SP )*
+ anyItemPermission ( SP )*
+ ( SEP ( SP )* anyItemPermission ( SP )* )*
+ CLOSE_CURLY
+ {
+ if ( !itemPermissionComponentsMonitor.finalStateValid() )
+ {
+ throw new RecognitionException( "Missing mandatory itemPermission components: "
+ + itemPermissionComponentsMonitor.getRemainingComponents() );
+ }
+
+ itemPermission = new ItemPermission( precedence, grantsAndDenials, userClasses );
+ precedence = null;
+ }
+ ;
+
+anyItemPermission
+ :
+ precedence
+ {
+ itemPermissionComponentsMonitor.useComponent( "precedence" );
+ }
+ | userClasses
+ {
+ itemPermissionComponentsMonitor.useComponent( "userClasses" );
+ }
+ | grantsAndDenials
+ {
+ itemPermissionComponentsMonitor.useComponent( "grantsAndDenials" );
+ }
+ ;
+ exception
+ catch [IllegalArgumentException e]
+ {
+ throw new RecognitionException( e.getMessage() );
+ }
+
+grantsAndDenials
+{
+ log.debug( "entered grantsAndDenials()" );
+ grantsAndDenials = new HashSet<GrantAndDenial>();
+ GrantAndDenial grantAndDenial = null;
+}
+ :
+ ID_grantsAndDenials ( SP )+
+ OPEN_CURLY ( SP )*
+ ( grantAndDenial = grantAndDenial ( SP )*
+ {
+ if ( !grantsAndDenials.add( grantAndDenial ))
+ {
+ throw new RecognitionException( "Duplicated GrantAndDenial bit: " + grantAndDenial );
+ }
+ }
+ ( SEP ( SP )* grantAndDenial = grantAndDenial ( SP )*
+ {
+ if ( !grantsAndDenials.add( grantAndDenial ))
+ {
+ throw new RecognitionException( "Duplicated GrantAndDenial bit: " + grantAndDenial );
+ }
+ }
+ )*
+ )?
+ CLOSE_CURLY
+ ;
+
+grantAndDenial returns [ GrantAndDenial l_grantAndDenial ]
+{
+ log.debug( "entered grantAndDenialsBit()" );
+ l_grantAndDenial = null;
+}
+ :
+ ID_grantAdd { l_grantAndDenial = GrantAndDenial.GRANT_ADD; }
+ | ID_denyAdd { l_grantAndDenial = GrantAndDenial.DENY_ADD; }
+ | ID_grantDiscloseOnError { l_grantAndDenial = GrantAndDenial.GRANT_DISCLOSE_ON_ERROR; }
+ | ID_denyDiscloseOnError { l_grantAndDenial = GrantAndDenial.DENY_DISCLOSE_ON_ERROR; }
+ | ID_grantRead { l_grantAndDenial = GrantAndDenial.GRANT_READ; }
+ | ID_denyRead { l_grantAndDenial = GrantAndDenial.DENY_READ; }
+ | ID_grantRemove { l_grantAndDenial = GrantAndDenial.GRANT_REMOVE; }
+ | ID_denyRemove { l_grantAndDenial = GrantAndDenial.DENY_REMOVE; }
+ //-- permissions that may be used only in conjunction
+ //-- with the entry component
+ | ID_grantBrowse { l_grantAndDenial = GrantAndDenial.GRANT_BROWSE; }
+ | ID_denyBrowse { l_grantAndDenial = GrantAndDenial.DENY_BROWSE; }
+ | ID_grantExport { l_grantAndDenial = GrantAndDenial.GRANT_EXPORT; }
+ | ID_denyExport { l_grantAndDenial = GrantAndDenial.DENY_EXPORT; }
+ | ID_grantImport { l_grantAndDenial = GrantAndDenial.GRANT_IMPORT; }
+ | ID_denyImport { l_grantAndDenial = GrantAndDenial.DENY_IMPORT; }
+ | ID_grantModify { l_grantAndDenial = GrantAndDenial.GRANT_MODIFY; }
+ | ID_denyModify { l_grantAndDenial = GrantAndDenial.DENY_MODIFY; }
+ | ID_grantRename { l_grantAndDenial = GrantAndDenial.GRANT_RENAME; }
+ | ID_denyRename { l_grantAndDenial = GrantAndDenial.DENY_RENAME; }
+ | ID_grantReturnDN { l_grantAndDenial = GrantAndDenial.GRANT_RETURN_DN; }
+ | ID_denyReturnDN { l_grantAndDenial = GrantAndDenial.DENY_RETURN_DN; }
+ //-- permissions that may be used in conjunction
+ //-- with any component, except entry, of ProtectedItems
+ | ID_grantCompare { l_grantAndDenial = GrantAndDenial.GRANT_COMPARE; }
+ | ID_denyCompare { l_grantAndDenial = GrantAndDenial.DENY_COMPARE; }
+ | ID_grantFilterMatch { l_grantAndDenial = GrantAndDenial.GRANT_FILTER_MATCH; }
+ | ID_denyFilterMatch { l_grantAndDenial = GrantAndDenial.DENY_FILTER_MATCH; }
+ | ID_grantInvoke { l_grantAndDenial = GrantAndDenial.GRANT_INVOKE; }
+ | ID_denyInvoke { l_grantAndDenial = GrantAndDenial.DENY_INVOKE; }
+ ;
+
+userClasses
+{
+ log.debug( "entered userClasses()" );
+ userClassesMap = new NoDuplicateKeysMap();
+}
+ :
+ ID_userClasses ( SP )+
+ OPEN_CURLY ( SP )*
+ (
+ userClass ( SP )*
+ ( SEP ( SP )* userClass ( SP )* )*
+ )?
+ CLOSE_CURLY
+ {
+ userClasses = new HashSet<UserClass>( userClassesMap.values() );
+ }
+ ;
+ exception
+ catch [IllegalArgumentException e]
+ {
+ throw new RecognitionException( "User Classes cannot be duplicated. " + e.getMessage() );
+ }
+
+// Dispatches to one of the six userClass alternatives.
+userClass
+{
+ // debug message fixed: it previously said "entered userClasses()",
+ // copy-pasted from the sibling rule, making traces ambiguous
+ log.debug( "entered userClass()" );
+}
+ :
+ allUsers
+ | thisEntry
+ | parentOfEntry
+ | name
+ | userGroup
+ | subtree
+ ;
+
+allUsers
+{
+ log.debug( "entered allUsers()" );
+}
+ :
+ ID_allUsers
+ {
+ userClassesMap.put( "allUsers", UserClass.ALL_USERS );
+ }
+ ;
+
+thisEntry
+{
+ log.debug( "entered thisEntry()" );
+}
+ :
+ ID_thisEntry
+ {
+ userClassesMap.put( "thisEntry", UserClass.THIS_ENTRY );
+ }
+ ;
+
+parentOfEntry
+{
+ log.debug( "entered parentOfEntry()" );
+}
+ :
+ ID_parentOfEntry
+ {
+ userClassesMap.put( "parentOfEntry", UserClass.PARENT_OF_ENTRY );
+ }
+ ;
+
+name
+{
+ log.debug( "entered name()" );
+ Set<DN> names = new HashSet<DN>();
+ DN distinguishedName = null;
+}
+ :
+ ID_name ( SP )+
+ OPEN_CURLY ( SP )*
+ distinguishedName=distinguishedName ( SP )*
+ {
+ names.add( distinguishedName );
+ }
+ ( SEP ( SP )* distinguishedName=distinguishedName ( SP )*
+ {
+ names.add( distinguishedName );
+ } )*
+ CLOSE_CURLY
+ {
+ userClassesMap.put( "name", new UserClass.Name( names ) );
+ }
+ ;
+
+userGroup
+{
+ log.debug( "entered userGroup()" );
+ Set<DN> userGroup = new HashSet<DN>();
+ DN distinguishedName = null;
+}
+ :
+ ID_userGroup ( SP )+
+ OPEN_CURLY ( SP )*
+ distinguishedName=distinguishedName ( SP )*
+ {
+ userGroup.add( distinguishedName );
+ }
+ ( SEP ( SP )* distinguishedName=distinguishedName ( SP )*
+ {
+ userGroup.add( distinguishedName );
+ } )*
+ CLOSE_CURLY
+ {
+ userClassesMap.put( "userGroup", new UserClass.UserGroup( userGroup ) );
+ }
+ ;
+
+subtree
+{
+ log.debug( "entered subtree()" );
+ Set<SubtreeSpecification> subtrees = new HashSet<SubtreeSpecification>();
+ SubtreeSpecification subtreeSpecification = null;
+}
+ :
+ ID_subtree ( SP )+
+ OPEN_CURLY ( SP )*
+ subtreeSpecification=subtreeSpecification ( SP )*
+ {
+ subtrees.add( subtreeSpecification );
+ }
+ ( SEP ( SP )* subtreeSpecification=subtreeSpecification ( SP )*
+ {
+ subtrees.add( subtreeSpecification );
+ } )*
+ CLOSE_CURLY
+ {
+ userClassesMap.put( "subtree", new UserClass.Subtree( subtrees ) );
+ }
+ ;
+
+userPermissions
+{
+ log.debug( "entered userPermissions()" );
+ userPermissions = new HashSet<UserPermission>();
+ UserPermission userPermission = null;
+}
+ :
+ ID_userPermissions ( SP )+
+ OPEN_CURLY ( SP )*
+ ( userPermission=userPermission ( SP )*
+ {
+ userPermissions.add( userPermission );
+ }
+ ( SEP ( SP )* userPermission=userPermission ( SP )*
+ {
+ userPermissions.add( userPermission );
+ }
+ )*
+ )?
+ CLOSE_CURLY
+ ;
+
+// userPermission ::= "{" anyUserPermission ( "," anyUserPermission )* "}"
+// Components may appear in any order; "protectedItems" and
+// "grantsAndDenials" are mandatory, "precedence" is optional (tracked by
+// the components monitor). The precedence field is consumed and reset to
+// null so it cannot leak into the next userPermission.
+userPermission returns [ UserPermission userPermission ]
+{
+ log.debug( "entered userPermission()" );
+ userPermission = null;
+ userPermissionComponentsMonitor = new MandatoryAndOptionalComponentsMonitor(
+ new String [] { "protectedItems", "grantsAndDenials" }, new String [] { "precedence" } );
+}
+ :
+ OPEN_CURLY ( SP )*
+ anyUserPermission ( SP )*
+ ( SEP ( SP )* anyUserPermission ( SP )* )*
+ CLOSE_CURLY
+ {
+ if ( !userPermissionComponentsMonitor.finalStateValid() )
+ {
+ throw new RecognitionException( "Missing mandatory userPermission components: "
+ + userPermissionComponentsMonitor.getRemainingComponents() );
+ }
+
+ userPermission = new UserPermission( precedence, grantsAndDenials, protectedItems );
+ precedence = null;
+ }
+ ;
+
+// One component of a userPermission; each use is recorded with the
+// monitor so duplicate components are rejected.
+anyUserPermission
+ :
+ precedence
+ {
+ userPermissionComponentsMonitor.useComponent( "precedence" );
+ }
+ | protectedItems
+ {
+ userPermissionComponentsMonitor.useComponent( "protectedItems" );
+ }
+ | grantsAndDenials
+ {
+ userPermissionComponentsMonitor.useComponent( "grantsAndDenials" );
+ }
+ ;
+ exception
+ catch [IllegalArgumentException e]
+ {
+ // the monitor throws IllegalArgumentException on a duplicate
+ // component; surface it as a parse error
+ throw new RecognitionException( e.getMessage() );
+ }
+
+// subtreeSpecification ::= "{" [ component ( "," component )* ] "}"
+// All components are optional (an empty "{}" is a valid specification);
+// the modifier accumulates them and builds the final SubtreeSpecification.
+subtreeSpecification returns [SubtreeSpecification ss]
+{
+ log.debug( "entered subtreeSpecification()" );
+ // clear out ss, ssModifier, chopBeforeExclusions and chopAfterExclusions
+ // in case something is left from the last parse
+ ss = null;
+ ssModifier = new SubtreeSpecificationModifier();
+ chopBeforeExclusions = new HashSet<DN>();
+ chopAfterExclusions = new HashSet<DN>();
+ subtreeSpecificationComponentsMonitor = new OptionalComponentsMonitor(
+ new String [] { "base", "specificExclusions", "minimum", "maximum" } );
+}
+ :
+ OPEN_CURLY ( SP )*
+ ( subtreeSpecificationComponent ( SP )*
+ ( SEP ( SP )* subtreeSpecificationComponent ( SP )* )* )?
+ CLOSE_CURLY
+ {
+ ss = ssModifier.getSubtreeSpecification();
+ }
+ ;
+
+// One optional component of a subtreeSpecification (base,
+// specificExclusions, minimum or maximum); each use is recorded with the
+// monitor so duplicate components are rejected.
+subtreeSpecificationComponent
+{
+ // fixed: previously logged "entered subtreeSpecification()" (copy-paste)
+ log.debug( "entered subtreeSpecificationComponent()" );
+}
+ :
+ ss_base
+ {
+ subtreeSpecificationComponentsMonitor.useComponent( "base" );
+ }
+ | ss_specificExclusions
+ {
+ subtreeSpecificationComponentsMonitor.useComponent( "specificExclusions" );
+ }
+ | ss_minimum
+ {
+ subtreeSpecificationComponentsMonitor.useComponent( "minimum" );
+ }
+ | ss_maximum
+ {
+ subtreeSpecificationComponentsMonitor.useComponent( "maximum" );
+ }
+ ;
+ exception
+ catch [IllegalArgumentException e]
+ {
+ // the monitor throws IllegalArgumentException on a duplicate
+ // component; surface it as a parse error
+ throw new RecognitionException( e.getMessage() );
+ }
+
+// ss_base ::= "base" SP+ distinguishedName — root of the subtree.
+ss_base
+{
+ log.debug( "entered ss_base()" );
+ DN base = null;
+}
+ :
+ ID_base ( SP )+ base=distinguishedName
+ {
+ ssModifier.setBase( base );
+ }
+ ;
+
+// ss_specificExclusions ::= "specificExclusions" SP+ specificExclusions
+// The chopBefore/chopAfter DN sets are filled as a side effect of the
+// specificExclusions sub-rule, then handed to the modifier here.
+ss_specificExclusions
+{
+ log.debug( "entered ss_specificExclusions()" );
+}
+ :
+ ID_specificExclusions ( SP )+ specificExclusions
+ {
+ ssModifier.setChopBeforeExclusions( chopBeforeExclusions );
+ ssModifier.setChopAfterExclusions( chopAfterExclusions );
+ }
+ ;
+
+// specificExclusions ::= "{" [ specificExclusion ( "," specificExclusion )* ] "}"
+specificExclusions
+{
+ log.debug( "entered specificExclusions()" );
+}
+ :
+ OPEN_CURLY ( SP )*
+ ( specificExclusion ( SP )*
+ ( SEP ( SP )* specificExclusion ( SP )* )*
+ )?
+ CLOSE_CURLY
+ ;
+
+// specificExclusion ::= chopBefore | chopAfter
+specificExclusion
+{
+ log.debug( "entered specificExclusion()" );
+}
+ :
+ chopBefore | chopAfter
+ ;
+
+// chopBefore ::= "chopBefore" ":" distinguishedName
+// Adds the DN to the shared chopBeforeExclusions set (a parser field).
+chopBefore
+{
+ log.debug( "entered chopBefore()" );
+ DN chopBeforeExclusion = null;
+}
+ :
+ ID_chopBefore ( SP )* COLON ( SP )* chopBeforeExclusion=distinguishedName
+ {
+ chopBeforeExclusions.add( chopBeforeExclusion );
+ }
+ ;
+
+// chopAfter ::= "chopAfter" ":" distinguishedName
+// Adds the DN to the shared chopAfterExclusions set (a parser field).
+chopAfter
+{
+ log.debug( "entered chopAfter()" );
+ DN chopAfterExclusion = null;
+}
+ :
+ ID_chopAfter ( SP )* COLON ( SP )* chopAfterExclusion=distinguishedName
+ {
+ chopAfterExclusions.add( chopAfterExclusion );
+ }
+ ;
+
+// ss_minimum ::= "minimum" SP+ baseDistance — minimum depth below base.
+ss_minimum
+{
+ log.debug( "entered ss_minimum()" );
+ int minimum = 0;
+}
+ :
+ ID_minimum ( SP )+ minimum=baseDistance
+ {
+ ssModifier.setMinBaseDistance( minimum );
+ }
+ ;
+
+// ss_maximum ::= "maximum" SP+ baseDistance — maximum depth below base.
+ss_maximum
+{
+ log.debug( "entered ss_maximum()" );
+ int maximum = 0;
+}
+ :
+ ID_maximum ( SP )+ maximum=baseDistance
+ {
+ ssModifier.setMaxBaseDistance( maximum );
+ }
+ ;
+
+// distinguishedName ::= SAFEUTF8STRING (a double-quoted DN; the lexer
+// strips the quotes). The DN is normalized when a schemaManager is set.
+distinguishedName returns [ DN name ]
+{
+ log.debug( "entered distinguishedName()" );
+ name = null;
+}
+ :
+ token:SAFEUTF8STRING
+ {
+ name = new DN( token.getText() );
+ if ( schemaManager != null )
+ {
+ name.normalize( schemaManager.getNormalizerMapping() );
+ }
+ // parameterized form avoids building the message when DEBUG is off
+ log.debug( "recognized a DistinguishedName: {}", token.getText() );
+ }
+ ;
+ exception
+ catch [Exception e]
+ {
+ // token is null if SAFEUTF8STRING itself failed to match; guard
+ // against an NPE that would mask the real parse error
+ throw new RecognitionException( "dnParser failed for "
+ + ( token == null ? "<unmatched input>" : token.getText() ) + " " + e.getMessage() );
+ }
+
+// baseDistance ::= INTEGER, converted via the parser's token2Integer helper.
+baseDistance returns [ int distance ]
+{
+ log.debug( "entered baseDistance()" );
+ distance = 0;
+}
+ :
+ token:INTEGER
+ {
+ distance = token2Integer( token );
+ }
+ ;
+
+// oid ::= DESCR | NUMERICOID — returned as its raw text.
+oid returns [ String result ]
+{
+ log.debug( "entered oid()" );
+ result = null;
+ Token token = null;
+}
+ :
+ { token = LT( 1 ); } // capture the next token before either alternative consumes it
+ ( DESCR | NUMERICOID )
+ {
+ result = token.getText();
+ // parameterized form avoids string concatenation when DEBUG is off
+ log.debug( "recognized an oid: {}", result );
+ }
+ ;
+
+// refinement ::= item | and | or | not — builds an ExprNode filter tree.
+refinement returns [ ExprNode node ]
+{
+ log.debug( "entered refinement()" );
+ node = null;
+}
+ :
+ node=item | node=and | node=or | node=not
+ ;
+
+// item ::= "item" ":" oid — an objectClass equality leaf node.
+item returns [ LeafNode node ]
+{
+ log.debug( "entered item()" );
+ node = null;
+ String oid = null;
+}
+ :
+ ID_item ( SP )* COLON ( SP )* oid=oid
+ {
+ node = new EqualityNode( SchemaConstants.OBJECT_CLASS_AT , new StringValue( oid ) );
+ }
+ ;
+
+// and ::= "and" ":" refinements — conjunction of child refinements.
+and returns [ BranchNode node ]
+{
+ log.debug( "entered and()" );
+ node = null;
+ List<ExprNode> children = null;
+}
+ :
+ ID_and ( SP )* COLON ( SP )* children=refinements
+ {
+ node = new AndNode( children );
+ }
+ ;
+
+// or ::= "or" ":" refinements — disjunction of child refinements.
+or returns [ BranchNode node ]
+{
+ log.debug( "entered or()" );
+ node = null;
+ List<ExprNode> children = null;
+}
+ :
+ ID_or ( SP )* COLON ( SP )* children=refinements
+ {
+ node = new OrNode( children );
+ }
+ ;
+
+// not ::= "not" ":" refinements
+// NOTE(review): X.501 defines "not" as negating a single refinement, but
+// this rule accepts a refinement set like and/or — confirm intended.
+not returns [ BranchNode node ]
+{
+ log.debug( "entered not()" );
+ node = null;
+ List<ExprNode> children = null;
+}
+ :
+ ID_not ( SP )* COLON ( SP )* children=refinements
+ {
+ node = new NotNode( children );
+ }
+ ;
+
+// refinements ::= "{" [ refinement ( "," refinement )* ] "}"
+// An empty "{}" yields an empty child list, never null.
+refinements returns [ List<ExprNode> children ]
+{
+ log.debug( "entered refinements()" );
+ children = null;
+ ExprNode child = null;
+ List<ExprNode> tempChildren = new ArrayList<ExprNode>();
+}
+ :
+ OPEN_CURLY ( SP )*
+ (
+ child=refinement ( SP )*
+ {
+ tempChildren.add( child );
+ }
+ ( SEP ( SP )* child=refinement ( SP )*
+ {
+ tempChildren.add( child );
+ } )*
+ )? CLOSE_CURLY
+ {
+ children = tempChildren;
+ }
+ ;
+
+
+// ----------------------------------------------------------------------------
+// lexer class definition
+// ----------------------------------------------------------------------------
+
+/**
+ * The parser's primary lexer.
+ *
+ * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
+ */
+class AntlrACIItemLexer extends Lexer;
+
+
+// ----------------------------------------------------------------------------
+// lexer options
+// ----------------------------------------------------------------------------
+
+// k = 2 lookahead; charVocabulary widened beyond ASCII so the
+// SAFEUTF8CHAR ranges below are accepted.
+options
+{
+ k = 2;
+ charVocabulary = '\3'..'\377';
+}
+
+
+//----------------------------------------------------------------------------
+// tokens
+//----------------------------------------------------------------------------
+
+// Literal keyword tokens of the ACI item grammar. The ID_grant*/ID_deny*
+// group mirrors the GrantsAndDenials BIT STRING positions from X.501
+// (the trailing "(n)" comments give each permission's bit number).
+tokens
+{
+ ID_identificationTag = "identificationTag";
+ ID_precedence = "precedence";
+ ID_FALSE = "FALSE";
+ ID_TRUE = "TRUE";
+ ID_none = "none";
+ ID_simple = "simple";
+ ID_strong = "strong";
+ ID_level = "level";
+ ID_basicLevels = "basicLevels";
+ ID_localQualifier = "localQualifier";
+ ID_signed = "signed";
+ ID_authenticationLevel = "authenticationLevel";
+ ID_itemOrUserFirst = "itemOrUserFirst";
+ ID_itemFirst = "itemFirst";
+ ID_userFirst = "userFirst";
+ ID_protectedItems = "protectedItems";
+ ID_classes = "classes";
+ ID_entry = "entry";
+ ID_allUserAttributeTypes = "allUserAttributeTypes";
+ ID_attributeType = "attributeType";
+ ID_allAttributeValues = "allAttributeValues";
+ ID_allUserAttributeTypesAndValues = "allUserAttributeTypesAndValues";
+ ID_selfValue = "selfValue";
+ ID_item = "item";
+ ID_and = "and";
+ ID_or = "or";
+ ID_not = "not";
+ ID_rangeOfValues = "rangeOfValues";
+ ID_maxValueCount = "maxValueCount";
+ ID_type = "type";
+ ID_maxCount = "maxCount";
+ ID_maxImmSub = "maxImmSub";
+ ID_restrictedBy = "restrictedBy";
+ ID_valuesIn = "valuesIn";
+ ID_userClasses = "userClasses";
+ ID_base = "base";
+ ID_specificExclusions = "specificExclusions";
+ ID_chopBefore = "chopBefore";
+ ID_chopAfter = "chopAfter";
+ ID_minimum = "minimum";
+ ID_maximum = "maximum";
+ ID_specificationFilter = "specificationFilter";
+ ID_grantsAndDenials = "grantsAndDenials";
+ ID_itemPermissions = "itemPermissions";
+ ID_userPermissions = "userPermissions";
+ ID_allUsers = "allUsers";
+ ID_thisEntry = "thisEntry";
+ ID_parentOfEntry = "parentOfEntry";
+ ID_subtree = "subtree";
+ ID_name = "name";
+ ID_userGroup = "userGroup";
+
+ ID_grantAdd = "grantAdd"; // (0),
+ ID_denyAdd = "denyAdd"; // (1),
+ ID_grantDiscloseOnError = "grantDiscloseOnError"; // (2),
+ ID_denyDiscloseOnError = "denyDiscloseOnError"; // (3),
+ ID_grantRead = "grantRead"; // (4),
+ ID_denyRead = "denyRead"; // (5),
+ ID_grantRemove = "grantRemove"; // (6),
+ ID_denyRemove = "denyRemove"; // (7),
+ //-- permissions that may be used only in conjunction
+ //-- with the entry component
+ ID_grantBrowse = "grantBrowse"; // (8),
+ ID_denyBrowse = "denyBrowse"; // (9),
+ ID_grantExport = "grantExport"; // (10),
+ ID_denyExport = "denyExport"; // (11),
+ ID_grantImport = "grantImport"; // (12),
+ ID_denyImport = "denyImport"; // (13),
+ ID_grantModify = "grantModify"; // (14),
+ ID_denyModify = "denyModify"; // (15),
+ ID_grantRename = "grantRename"; // (16),
+ ID_denyRename = "denyRename"; // (17),
+ ID_grantReturnDN = "grantReturnDN"; // (18),
+ ID_denyReturnDN = "denyReturnDN"; // (19),
+ //-- permissions that may be used in conjunction
+ //-- with any component, except entry, of ProtectedItems
+ ID_grantCompare = "grantCompare"; // (20),
+ ID_denyCompare = "denyCompare"; // (21),
+ ID_grantFilterMatch = "grantFilterMatch"; // (22),
+ ID_denyFilterMatch = "denyFilterMatch"; // (23),
+ ID_grantInvoke = "grantInvoke"; // (24),
+ ID_denyInvoke = "denyInvoke"; // (25)
+}
+
+
+// ----------------------------------------------------------------------------
+// lexer initialization
+// ----------------------------------------------------------------------------
+
+// Lexer class body: SLF4J logger shared by the lexer rules' actions.
+{
+ private static final Logger log = LoggerFactory.getLogger( AntlrACIItemLexer.class );
+}
+
+
+// ----------------------------------------------------------------------------
+// attribute description lexer rules from models
+// ----------------------------------------------------------------------------
+
+// This is all messed up - could not figure out how to get antlr to represent
+// the safe UTF-8 character set from RFC 3642 for production SafeUTF8Character
+
+// Approximation of the RFC 3642 SafeUTF8Character set (see the note
+// above) — excludes '"' (0x22) so it can terminate SAFEUTF8STRING.
+protected SAFEUTF8CHAR :
+ '\u0001'..'\u0021' |
+ '\u0023'..'\u007F' |
+ '\u00c0'..'\u00d6' |
+ '\u00d8'..'\u00f6' |
+ '\u00f8'..'\u00ff' |
+ '\u0100'..'\u1fff' |
+ '\u3040'..'\u318f' |
+ '\u3300'..'\u337f' |
+ '\u3400'..'\u3d2d' |
+ '\u4e00'..'\u9fff' |
+ '\uf900'..'\ufaff' ;
+
+// Single-character structural tokens. SP calls newline() on '\n' so the
+// lexer's line counter stays correct for error reporting.
+OPEN_CURLY : '{' ;
+
+CLOSE_CURLY : '}' ;
+
+SEP : ',' ;
+
+SP : ' ' | '\t' | '\n' { newline(); } | '\r' ;
+
+COLON : ':' ;
+
+// Protected (fragment) rules composed by INTEGER_OR_NUMERICOID below.
+// INTEGER forbids leading zeros: a multi-digit number starts with 1-9.
+protected DIGIT : '0' | LDIGIT ;
+
+protected LDIGIT : '1'..'9' ;
+
+protected ALPHA : 'A'..'Z' | 'a'..'z' ;
+
+protected INTEGER : DIGIT | ( LDIGIT ( DIGIT )+ ) ;
+
+protected HYPHEN : '-' ;
+
+protected NUMERICOID : INTEGER ( DOT INTEGER )+ ;
+
+protected DOT : '.' ;
+
+// Disambiguates INTEGER vs NUMERICOID with a syntactic predicate (a
+// digit followed by '.' means an OID) and retags the token via $setType.
+INTEGER_OR_NUMERICOID
+ :
+ ( INTEGER DOT ) => NUMERICOID
+ {
+ $setType( NUMERICOID );
+ }
+ |
+ INTEGER
+ {
+ $setType( INTEGER );
+ }
+ ;
+
+// Double-quoted string; the '!' suffix excludes the quotes from the token text.
+SAFEUTF8STRING : '"'! ( SAFEUTF8CHAR )* '"'! ;
+
+// Either an identifier (ALPHA followed by ALPHA/DIGIT/HYPHEN), or — via
+// syntactic predicates — the raw bodies of "attributeValue { ... }" and
++// "rangeOfValues ( ... )", which are captured as candidate tokens and
+// parsed later by dedicated parsers.
+DESCR // THIS RULE ALSO STANDS FOR AN IDENTIFIER
+ :
+ ( "attributeValue" ( SP! )+ '{' ) =>
+ "attributeValue"! ( SP! )+ '{'! ( options { greedy = false; } : . )* '}'!
+ { $setType( ATTRIBUTE_VALUE_CANDIDATE ); }
+ | ( "rangeOfValues" ( SP! )+ '(' ) =>
+ "rangeOfValues"! ( SP! )+ FILTER
+ { $setType( RANGE_OF_VALUES_CANDIDATE ); }
+ | ALPHA ( ALPHA | DIGIT | HYPHEN )*
+ ;
+
+// Minimal recognizer for an LDAP filter: matches balanced parentheses
+// around and/or/not sub-filters or an opaque leaf value (FILTER_VALUE
+// consumes anything that is not a filter metacharacter).
+protected FILTER : '(' ( ( '&' (SP)* (FILTER)+ ) | ( '|' (SP)* (FILTER)+ ) | ( '!' (SP)* FILTER ) | FILTER_VALUE ) ')' (SP)* ;
+
+protected FILTER_VALUE : (options{greedy=true;}: ~( ')' | '(' | '&' | '|' | '!' ) ( ~(')') )* ) ;
+