You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@jackrabbit.apache.org by ju...@apache.org on 2009/09/08 18:09:45 UTC
svn commit: r812570 [8/24] - in /jackrabbit/sandbox/JCR-1456: ./
jackrabbit-api/ jackrabbit-api/src/main/appended-resources/
jackrabbit-api/src/main/appended-resources/META-INF/
jackrabbit-api/src/main/java/org/apache/jackrabbit/api/security/ jackrabbi...
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/persistence/bundle/AbstractBundlePersistenceManager.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/persistence/bundle/AbstractBundlePersistenceManager.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/persistence/bundle/AbstractBundlePersistenceManager.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/persistence/bundle/AbstractBundlePersistenceManager.java Tue Sep 8 16:09:28 2009
@@ -706,7 +706,7 @@
private void putBundle(NodePropBundle bundle) throws ItemStateException {
storeBundle(bundle);
bundle.markOld();
- log.debug("stored bundle " + bundle.getId());
+ log.debug("stored bundle {}", bundle.getId());
missing.remove(bundle.getId());
// only put to cache if already exists. this is to ensure proper overwrite
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/persistence/bundle/BundleDbPersistenceManager.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/persistence/bundle/BundleDbPersistenceManager.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/persistence/bundle/BundleDbPersistenceManager.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/persistence/bundle/BundleDbPersistenceManager.java Tue Sep 8 16:09:28 2009
@@ -59,7 +59,6 @@
import org.apache.jackrabbit.core.util.db.ConnectionHelper;
import org.apache.jackrabbit.core.util.db.DbUtility;
import org.apache.jackrabbit.core.util.db.StreamWrapper;
-import org.apache.jackrabbit.uuid.UUID;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -81,6 +80,7 @@
* <li><param name="{@link #setSchemaObjectPrefix(String) schemaObjectPrefix}" value=""/>
* <li><param name="{@link #setErrorHandling(String) errorHandling}" value=""/>
* <li><param name="{@link #setBlockOnConnectionLoss(String) blockOnConnectionLoss}" value="false"/>
+ * <li><param name="{@link #setSchemaCheckEnabled(String) schemaCheckEnabled}" value="true"/>
* </ul>
*/
public class BundleDbPersistenceManager extends AbstractBundlePersistenceManager {
@@ -178,6 +178,11 @@
private String name = super.toString();
/**
+ * Whether the schema check must be done during initialization.
+ */
+ private boolean schemaCheckEnabled = true;
+
+ /**
* Returns the configured JDBC connection url.
* @return the configured JDBC connection url.
*/
@@ -438,6 +443,20 @@
}
/**
+ * @return whether the schema check is enabled
+ */
+ public final boolean isSchemaCheckEnabled() {
+ return schemaCheckEnabled;
+ }
+
+ /**
+ * @param enabled set whether the schema check is enabled
+ */
+ public final void setSchemaCheckEnabled(boolean enabled) {
+ schemaCheckEnabled = enabled;
+ }
+
+ /**
* {@inheritDoc}
*
* Basically wraps a JDBC transaction around super.store().
@@ -487,7 +506,9 @@
schemaObjectPrefix = conHelper.prepareDbIdentifier(schemaObjectPrefix);
// check if schema objects exist and create them if necessary
- createCheckSchemaOperation().run();
+ if (isSchemaCheckEnabled()) {
+ createCheckSchemaOperation().run();
+ }
// create correct blob store
blobStore = createBlobStore();
@@ -879,27 +900,6 @@
}
/**
- * Sets the key parameters to the prepared statement, starting at
- * <code>pos</code> and returns the number of key parameters + pos.
- *
- * @param stmt the statement
- * @param uuid the uuid of the key
- * @param pos the position of the key parameter
- * @return the number of key parameters + <code>pos</code>
- * @throws SQLException if an SQL error occurs.
- */
- protected int setKey(PreparedStatement stmt, UUID uuid, int pos)
- throws SQLException {
- if (getStorageModel() == SM_BINARY_KEYS) {
- stmt.setBytes(pos++, uuid.getRawBytes());
- } else {
- stmt.setLong(pos++, uuid.getMostSignificantBits());
- stmt.setLong(pos++, uuid.getLeastSignificantBits());
- }
- return pos;
- }
-
- /**
* Constructs a parameter list for a PreparedStatement
* for the given node identifier.
*
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/persistence/db/DatabasePersistenceManager.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/persistence/db/DatabasePersistenceManager.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/persistence/db/DatabasePersistenceManager.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/persistence/db/DatabasePersistenceManager.java Tue Sep 8 16:09:28 2009
@@ -84,6 +84,11 @@
protected boolean externalBLOBs;
+ /**
+ * Whether the schema check must be done during initialization.
+ */
+ private boolean schemaCheckEnabled = true;
+
// initial size of buffer used to serialize objects
protected static final int INITIAL_BUFFER_SIZE = 1024;
@@ -182,6 +187,20 @@
this.externalBLOBs = Boolean.valueOf(externalBLOBs).booleanValue();
}
+ /**
+ * @return whether the schema check is enabled
+ */
+ public final boolean isSchemaCheckEnabled() {
+ return schemaCheckEnabled;
+ }
+
+ /**
+ * @param enabled set whether the schema check is enabled
+ */
+ public final void setSchemaCheckEnabled(boolean enabled) {
+ schemaCheckEnabled = enabled;
+ }
+
//---------------------------------------------------< PersistenceManager >
/**
* {@inheritDoc}
@@ -206,7 +225,9 @@
prepareSchemaObjectPrefix();
// check if schema objects exist and create them if necessary
- checkSchema();
+ if (isSchemaCheckEnabled()) {
+ checkSchema();
+ }
// build sql statements
buildSQLStatements();
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/persistence/util/Serializer.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/persistence/util/Serializer.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/persistence/util/Serializer.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/persistence/util/Serializer.java Tue Sep 8 16:09:28 2009
@@ -28,7 +28,6 @@
import org.apache.jackrabbit.core.state.ChildNodeEntry;
import org.apache.jackrabbit.core.value.InternalValue;
import org.apache.jackrabbit.spi.Name;
-import org.apache.jackrabbit.uuid.UUID;
import org.apache.jackrabbit.spi.commons.name.NameFactoryImpl;
import javax.jcr.PropertyType;
@@ -126,7 +125,7 @@
String s = in.readUTF();
state.setNodeTypeName(NameFactoryImpl.getInstance().create(s));
// parentUUID (may be null)
- byte[] uuidBytes = new byte[UUID.UUID_BYTE_LENGTH];
+ byte[] uuidBytes = new byte[NodeId.UUID_BYTE_LENGTH];
in.readFully(uuidBytes);
if (!Arrays.equals(uuidBytes, NULL_UUID_PLACEHOLDER_BYTES)) {
state.setParentId(new NodeId(uuidBytes));
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/OnWorkspaceInconsistency.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/OnWorkspaceInconsistency.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/OnWorkspaceInconsistency.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/OnWorkspaceInconsistency.java Tue Sep 8 16:09:28 2009
@@ -66,7 +66,8 @@
}
};
- protected static final Map INSTANCES = new HashMap();
+ protected static final Map<String, OnWorkspaceInconsistency> INSTANCES
+ = new HashMap<String, OnWorkspaceInconsistency>();
static {
INSTANCES.put(FAIL.name, FAIL);
@@ -79,6 +80,8 @@
/**
* Protected constructor.
+ *
+ * @param name a unique name for this handler.
*/
protected OnWorkspaceInconsistency(String name) {
this.name = name;
@@ -103,7 +106,7 @@
*/
public static OnWorkspaceInconsistency fromString(String name)
throws IllegalArgumentException {
- OnWorkspaceInconsistency handler = (OnWorkspaceInconsistency) INSTANCES.get(name.toLowerCase());
+ OnWorkspaceInconsistency handler = INSTANCES.get(name.toLowerCase());
if (handler == null) {
throw new IllegalArgumentException("Unknown name: " + name);
} else {
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/PropertyTypeRegistry.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/PropertyTypeRegistry.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/PropertyTypeRegistry.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/PropertyTypeRegistry.java Tue Sep 8 16:09:28 2009
@@ -53,7 +53,7 @@
private final NodeTypeRegistry registry;
/** Property Name to TypeMapping[] mapping */
- private final Map typeMapping = new HashMap();
+ private final Map<Name, TypeMapping[]> typeMapping = new HashMap<Name, TypeMapping[]>();
/**
* Creates a new <code>PropertyTypeRegistry</code> instance. This instance
@@ -75,7 +75,7 @@
*/
public TypeMapping[] getPropertyTypes(Name propName) {
synchronized (typeMapping) {
- TypeMapping[] types = (TypeMapping[]) typeMapping.get(propName);
+ TypeMapping[] types = typeMapping.get(propName);
if (types != null) {
return types;
} else {
@@ -89,12 +89,12 @@
NodeTypeDef def = registry.getNodeTypeDef(ntName);
PropDef[] propDefs = def.getPropertyDefs();
synchronized (typeMapping) {
- for (int i = 0; i < propDefs.length; i++) {
- int type = propDefs[i].getRequiredType();
- if (!propDefs[i].definesResidual() && type != PropertyType.UNDEFINED) {
- Name name = propDefs[i].getName();
+ for (PropDef propDef : propDefs) {
+ int type = propDef.getRequiredType();
+ if (!propDef.definesResidual() && type != PropertyType.UNDEFINED) {
+ Name name = propDef.getName();
// only remember defined property types
- TypeMapping[] types = (TypeMapping[]) typeMapping.get(name);
+ TypeMapping[] types = typeMapping.get(name);
if (types == null) {
types = new TypeMapping[1];
} else {
@@ -102,7 +102,7 @@
System.arraycopy(types, 0, tmp, 0, types.length);
types = tmp;
}
- types[types.length - 1] = new TypeMapping(ntName, type, propDefs[i].isMultiple());
+ types[types.length - 1] = new TypeMapping(ntName, type, propDef.isMultiple());
typeMapping.put(name, types);
}
}
@@ -120,18 +120,18 @@
public void nodeTypeUnregistered(Name ntName) {
// remove all TypeMapping instances referring to this ntName
synchronized (typeMapping) {
- Map modified = new HashMap();
+ Map<Name, TypeMapping[]> modified = new HashMap<Name, TypeMapping[]>();
for (Iterator it = typeMapping.keySet().iterator(); it.hasNext();) {
Name propName = (Name) it.next();
- TypeMapping[] mapping = (TypeMapping[]) typeMapping.get(propName);
- List remove = null;
- for (int i = 0; i < mapping.length; i++) {
- if (mapping[i].ntName.equals(ntName)) {
+ TypeMapping[] mapping = typeMapping.get(propName);
+ List<TypeMapping> remove = null;
+ for (TypeMapping tm : mapping) {
+ if (tm.ntName.equals(ntName)) {
if (remove == null) {
// not yet created
- remove = new ArrayList(mapping.length);
+ remove = new ArrayList<TypeMapping>(mapping.length);
}
- remove.add(mapping[i]);
+ remove.add(tm);
}
}
if (remove != null) {
@@ -140,7 +140,7 @@
// all removed -> done
} else {
// only some removed
- List remaining = new ArrayList(Arrays.asList(mapping));
+ List<TypeMapping> remaining = new ArrayList<TypeMapping>(Arrays.asList(mapping));
remaining.removeAll(remove);
modified.put(propName, remaining.toArray(new TypeMapping[remaining.size()]));
}
@@ -156,9 +156,8 @@
* from the {@link org.apache.jackrabbit.core.nodetype.NodeTypeRegistry}.
*/
private void fillCache() {
- Name[] ntNames = registry.getRegisteredNodeTypes();
- for (int i = 0; i < ntNames.length; i++) {
- nodeTypeRegistered(ntNames[i]);
+ for (Name ntName : registry.getRegisteredNodeTypes()) {
+ nodeTypeRegistered(ntName);
}
}
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/QueryManagerImpl.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/QueryManagerImpl.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/QueryManagerImpl.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/QueryManagerImpl.java Tue Sep 8 16:09:28 2009
@@ -36,7 +36,6 @@
import org.apache.jackrabbit.spi.commons.query.qom.QueryObjectModelFactoryImpl;
import org.apache.jackrabbit.spi.commons.query.qom.QueryObjectModelTree;
import org.apache.jackrabbit.spi.commons.name.NameConstants;
-import org.apache.jackrabbit.uuid.UUID;
/**
* This class implements the {@link QueryManager} interface.
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/AbstractExcerpt.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/AbstractExcerpt.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/AbstractExcerpt.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/AbstractExcerpt.java Tue Sep 8 16:09:28 2009
@@ -36,7 +36,6 @@
import java.io.Reader;
import java.util.Set;
import java.util.HashSet;
-import java.util.Iterator;
import java.util.TreeMap;
import java.util.SortedMap;
import java.util.Arrays;
@@ -177,14 +176,12 @@
/**
* @return the extracted terms from the query.
*/
- protected final Set getQueryTerms() {
- Set extractedTerms = new HashSet();
- Set relevantTerms = new HashSet();
+ protected final Set<Term> getQueryTerms() {
+ Set<Term> extractedTerms = new HashSet<Term>();
+ Set<Term> relevantTerms = new HashSet<Term>();
query.extractTerms(extractedTerms);
// only keep terms for fulltext fields
- Iterator it = extractedTerms.iterator();
- while (it.hasNext()) {
- Term t = (Term) it.next();
+ for (Term t : extractedTerms) {
if (t.field().equals(FieldNames.FULLTEXT)) {
relevantTerms.add(t);
} else {
@@ -230,15 +227,15 @@
*/
private TermPositionVector createTermPositionVector(String text) {
// term -> TermVectorOffsetInfo[]
- final SortedMap termMap = new TreeMap();
+ final SortedMap<String, TermVectorOffsetInfo[]> termMap =
+ new TreeMap<String, TermVectorOffsetInfo[]>();
Reader r = new StringReader(text);
TokenStream ts = index.getTextAnalyzer().tokenStream("", r);
Token t = new Token();
try {
while ((t = ts.next(t)) != null) {
String termText = t.term();
- TermVectorOffsetInfo[] info =
- (TermVectorOffsetInfo[]) termMap.get(termText);
+ TermVectorOffsetInfo[] info = termMap.get(termText);
if (info == null) {
info = new TermVectorOffsetInfo[1];
} else {
@@ -266,7 +263,7 @@
public TermVectorOffsetInfo[] getOffsets(int index) {
TermVectorOffsetInfo[] info = TermVectorOffsetInfo.EMPTY_OFFSET_INFO;
if (index >= 0 && index < terms.length) {
- info = (TermVectorOffsetInfo[]) termMap.get(terms[index]);
+ info = termMap.get(terms[index]);
}
return info;
}
@@ -286,7 +283,7 @@
public int[] getTermFrequencies() {
int[] freqs = new int[terms.length];
for (int i = 0; i < terms.length; i++) {
- freqs[i] = ((TermVectorOffsetInfo[]) termMap.get(terms[i])).length;
+ freqs[i] = termMap.get(terms[i]).length;
}
return freqs;
}
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/AbstractIndex.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/AbstractIndex.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/AbstractIndex.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/AbstractIndex.java Tue Sep 8 16:09:28 2009
@@ -20,6 +20,7 @@
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
+import org.apache.lucene.index.IndexDeletionPolicy;
import org.apache.lucene.store.Directory;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@@ -33,7 +34,7 @@
import java.io.PrintStream;
import java.io.StringReader;
import java.util.BitSet;
-import java.util.Iterator;
+import java.util.List;
/**
* Implements common functionality for a lucene index.
@@ -181,16 +182,16 @@
public Object call() throws Exception {
long time = System.currentTimeMillis();
writer.addDocument(doc);
- return new Long(System.currentTimeMillis() - time);
+ return System.currentTimeMillis() - time;
}
};
}
DynamicPooledExecutor.Result[] results = EXECUTOR.executeAndWait(commands);
invalidateSharedReader();
IOException ex = null;
- for (int i = 0; i < results.length; i++) {
- if (results[i].getException() != null) {
- Throwable cause = results[i].getException().getCause();
+ for (DynamicPooledExecutor.Result result : results) {
+ if (result.getException() != null) {
+ Throwable cause = result.getException().getCause();
if (ex == null) {
// only throw the first exception
if (cause instanceof IOException) {
@@ -203,7 +204,7 @@
log.warn("Exception while inverting document", cause);
}
} else {
- log.debug("Inverted document in {} ms", results[i].get());
+ log.debug("Inverted document in {} ms", result.get());
}
}
if (ex != null) {
@@ -238,7 +239,13 @@
indexWriter = null;
}
if (indexReader == null) {
- IndexReader reader = IndexReader.open(getDirectory());
+ IndexDeletionPolicy idp = getIndexDeletionPolicy();
+ IndexReader reader;
+ if (idp != null) {
+ reader = IndexReader.open(getDirectory(), idp);
+ } else {
+ reader = IndexReader.open(getDirectory());
+ }
reader.setTermInfosIndexDivisor(termInfosIndexDivisor);
indexReader = new CommittableIndexReader(reader);
}
@@ -246,6 +253,17 @@
}
/**
+ * Returns the index deletion policy for this index. This implementation
+ * always returns <code>null</code>.
+ *
+ * @return the index deletion policy for this index or <code>null</code> if
+ * none is present.
+ */
+ protected IndexDeletionPolicy getIndexDeletionPolicy() {
+ return null;
+ }
+
+ /**
* Returns a read-only index reader, that can be used concurrently with
* other threads writing to this index. The returned index reader is
* read-only, that is, any attempt to delete a document from the index
@@ -462,15 +480,14 @@
* @throws IOException if the document cannot be added to the indexing
* queue.
*/
+ @SuppressWarnings("unchecked")
private Document getFinishedDocument(Document doc) throws IOException {
if (!Util.isDocumentReady(doc)) {
Document copy = new Document();
// mark the document that reindexing is required
copy.add(new Field(FieldNames.REINDEXING_REQUIRED, "",
Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS));
- Iterator fields = doc.getFields().iterator();
- while (fields.hasNext()) {
- Fieldable f = (Fieldable) fields.next();
+ for (Fieldable f : (List<Fieldable>) doc.getFields()) {
Fieldable field = null;
Field.TermVector tv = getTermVectorParameter(f);
Field.Store stored = getStoreParameter(f);
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/AggregateRuleImpl.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/AggregateRuleImpl.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/AggregateRuleImpl.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/AggregateRuleImpl.java Tue Sep 8 16:09:28 2009
@@ -42,7 +42,6 @@
import java.util.ArrayList;
import java.util.List;
import java.util.Arrays;
-import java.util.Iterator;
/**
* <code>AggregateRule</code> defines a configuration for a node index
@@ -123,18 +122,16 @@
*/
public NodeState getAggregateRoot(NodeState nodeState)
throws ItemStateException, RepositoryException {
- for (int i = 0; i < nodeIncludes.length; i++) {
- NodeState aggregateRoot = nodeIncludes[i].matches(nodeState);
- if (aggregateRoot != null
- && aggregateRoot.getNodeTypeName().equals(nodeTypeName)) {
+ for (NodeInclude nodeInclude : nodeIncludes) {
+ NodeState aggregateRoot = nodeInclude.matches(nodeState);
+ if (aggregateRoot != null && aggregateRoot.getNodeTypeName().equals(nodeTypeName)) {
return aggregateRoot;
}
}
// check property includes
- for (int i = 0; i < propertyIncludes.length; i++) {
- NodeState aggregateRoot = propertyIncludes[i].matches(nodeState);
- if (aggregateRoot != null
- && aggregateRoot.getNodeTypeName().equals(nodeTypeName)) {
+ for (PropertyInclude propertyInclude : propertyIncludes) {
+ NodeState aggregateRoot = propertyInclude.matches(nodeState);
+ if (aggregateRoot != null && aggregateRoot.getNodeTypeName().equals(nodeTypeName)) {
return aggregateRoot;
}
}
@@ -154,12 +151,12 @@
public NodeState[] getAggregatedNodeStates(NodeState nodeState)
throws ItemStateException {
if (nodeState.getNodeTypeName().equals(nodeTypeName)) {
- List nodeStates = new ArrayList();
- for (int i = 0; i < nodeIncludes.length; i++) {
- nodeStates.addAll(Arrays.asList(nodeIncludes[i].resolve(nodeState)));
+ List<NodeState> nodeStates = new ArrayList<NodeState>();
+ for (NodeInclude nodeInclude : nodeIncludes) {
+ nodeStates.addAll(Arrays.asList(nodeInclude.resolve(nodeState)));
}
if (nodeStates.size() > 0) {
- return (NodeState[]) nodeStates.toArray(new NodeState[nodeStates.size()]);
+ return nodeStates.toArray(new NodeState[nodeStates.size()]);
}
}
return null;
@@ -171,14 +168,12 @@
public PropertyState[] getAggregatedPropertyStates(NodeState nodeState)
throws ItemStateException {
if (nodeState.getNodeTypeName().equals(nodeTypeName)) {
- List propStates = new ArrayList();
- for (int i = 0; i < propertyIncludes.length; i++) {
- propStates.addAll(Arrays.asList(
- propertyIncludes[i].resolvePropertyStates(nodeState)));
+ List<PropertyState> propStates = new ArrayList<PropertyState>();
+ for (PropertyInclude propertyInclude : propertyIncludes) {
+ propStates.addAll(Arrays.asList(propertyInclude.resolvePropertyStates(nodeState)));
}
if (propStates.size() > 0) {
- return (PropertyState[]) propStates.toArray(
- new PropertyState[propStates.size()]);
+ return propStates.toArray(new PropertyState[propStates.size()]);
}
}
return null;
@@ -216,7 +211,7 @@
*/
private NodeInclude[] getNodeIncludes(Node config)
throws MalformedPathException, IllegalNameException, NamespaceException {
- List includes = new ArrayList();
+ List<NodeInclude> includes = new ArrayList<NodeInclude>();
NodeList childNodes = config.getChildNodes();
for (int i = 0; i < childNodes.getLength(); i++) {
Node n = childNodes.item(i);
@@ -226,19 +221,18 @@
if (ntAttr != null) {
ntName = resolver.getQName(ntAttr.getNodeValue());
}
- String[] elements = Text.explode(getTextContent(n), '/');
PathBuilder builder = new PathBuilder();
- for (int j = 0; j < elements.length; j++) {
- if (elements[j].equals("*")) {
+ for (String element : Text.explode(getTextContent(n), '/')) {
+ if (element.equals("*")) {
builder.addLast(NameConstants.ANY_NAME);
} else {
- builder.addLast(resolver.getQName(elements[j]));
+ builder.addLast(resolver.getQName(element));
}
}
includes.add(new NodeInclude(builder.getPath(), ntName));
}
}
- return (NodeInclude[]) includes.toArray(new NodeInclude[includes.size()]);
+ return includes.toArray(new NodeInclude[includes.size()]);
}
/**
@@ -258,23 +252,22 @@
private PropertyInclude[] getPropertyIncludes(Node config) throws
MalformedPathException, IllegalNameException, NamespaceException,
RepositoryException {
- List includes = new ArrayList();
+ List<PropertyInclude> includes = new ArrayList<PropertyInclude>();
NodeList childNodes = config.getChildNodes();
for (int i = 0; i < childNodes.getLength(); i++) {
Node n = childNodes.item(i);
if (n.getNodeName().equals("include-property")) {
- String[] elements = Text.explode(getTextContent(n), '/');
PathBuilder builder = new PathBuilder();
- for (int j = 0; j < elements.length; j++) {
- if (elements[j].equals("*")) {
+ for (String element : Text.explode(getTextContent(n), '/')) {
+ if (element.equals("*")) {
throw new IllegalNameException("* not supported in include-property");
}
- builder.addLast(resolver.getQName(elements[j]));
+ builder.addLast(resolver.getQName(element));
}
includes.add(new PropertyInclude(builder.getPath()));
}
}
- return (PropertyInclude[]) includes.toArray(new PropertyInclude[includes.size()]);
+ return includes.toArray(new PropertyInclude[includes.size()]);
}
//---------------------------< internal >-----------------------------------
@@ -378,10 +371,10 @@
* @throws ItemStateException if an error occurs while accessing node
* states.
*/
- protected void resolve(NodeState nodeState, List collector, int offset)
+ protected void resolve(NodeState nodeState, List<NodeState> collector, int offset)
throws ItemStateException {
Name currentName = pattern.getElements()[offset].getName();
- List cne;
+ List<ChildNodeEntry> cne;
if (currentName.getLocalName().equals("*")) {
// matches all
cne = nodeState.getChildNodeEntries();
@@ -390,10 +383,8 @@
}
if (pattern.getLength() - 1 == offset) {
// last segment -> add to collector if node type matches
- Iterator it = cne.iterator();
- while (it.hasNext()) {
- NodeId id = ((ChildNodeEntry) it.next()).getId();
- NodeState ns = (NodeState) ism.getItemState(id);
+ for (ChildNodeEntry entry : cne) {
+ NodeState ns = (NodeState) ism.getItemState(entry.getId());
if (nodeTypeName == null || ns.getNodeTypeName().equals(nodeTypeName)) {
collector.add(ns);
}
@@ -401,9 +392,8 @@
} else {
// traverse
offset++;
- Iterator it = cne.iterator();
- while (it.hasNext()) {
- NodeId id = ((ChildNodeEntry) it.next()).getId();
+ for (ChildNodeEntry entry : cne) {
+ NodeId id = entry.getId();
resolve((NodeState) ism.getItemState(id), collector, offset);
}
}
@@ -433,9 +423,9 @@
* node states.
*/
NodeState[] resolve(NodeState nodeState) throws ItemStateException {
- List nodeStates = new ArrayList();
+ List<NodeState> nodeStates = new ArrayList<NodeState>();
resolve(nodeState, nodeStates, 0);
- return (NodeState[]) nodeStates.toArray(new NodeState[nodeStates.size()]);
+ return nodeStates.toArray(new NodeState[nodeStates.size()]);
}
}
@@ -459,18 +449,16 @@
*/
PropertyState[] resolvePropertyStates(NodeState nodeState)
throws ItemStateException {
- List nodeStates = new ArrayList();
+ List<NodeState> nodeStates = new ArrayList<NodeState>();
resolve(nodeState, nodeStates, 0);
- List propStates = new ArrayList();
- for (Iterator it = nodeStates.iterator(); it.hasNext(); ) {
- NodeState state = (NodeState) it.next();
+ List<PropertyState> propStates = new ArrayList<PropertyState>();
+ for (NodeState state : nodeStates) {
if (state.hasPropertyName(propertyName)) {
PropertyId propId = new PropertyId(state.getNodeId(), propertyName);
- propStates.add(ism.getItemState(propId));
+ propStates.add((PropertyState) ism.getItemState(propId));
}
}
- return (PropertyState[]) propStates.toArray(
- new PropertyState[propStates.size()]);
+ return propStates.toArray(new PropertyState[propStates.size()]);
}
}
}
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/CachingIndexReader.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/CachingIndexReader.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/CachingIndexReader.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/CachingIndexReader.java Tue Sep 8 16:09:28 2009
@@ -220,13 +220,12 @@
public Document document(int n, FieldSelector fieldSelector)
throws CorruptIndexException, IOException {
if (fieldSelector == FieldSelectors.UUID) {
- Integer docNum = new Integer(n);
Document doc;
- NodeId id = docNumber2id.get(docNum);
+ NodeId id = docNumber2id.get(n);
if (id == null) {
doc = super.document(n, fieldSelector);
id = new NodeId(doc.get(FieldNames.UUID));
- docNumber2id.put(docNum, id);
+ docNumber2id.put(n, id);
} else {
doc = new Document();
doc.add(new Field(FieldNames.UUID, id.toString(),
@@ -412,8 +411,8 @@
public void collect(Term term, TermDocs tDocs) throws IOException {
NodeId id = new NodeId(term.text());
while (tDocs.next()) {
- Integer docId = new Integer(tDocs.doc());
- NodeInfo info = (NodeInfo) docs.get(docId);
+ Integer docId = tDocs.doc();
+ NodeInfo info = docs.get(docId);
if (info == null) {
// shareable node, see above
} else {
@@ -454,8 +453,8 @@
}
log.debug("initialized {} DocIds in {} ms, {} foreign parents",
new Object[]{
- new Integer(parents.length),
- new Long(time),
+ parents.length,
+ time,
nf.format(foreignParents)
});
}
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/CachingMultiIndexReader.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/CachingMultiIndexReader.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/CachingMultiIndexReader.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/CachingMultiIndexReader.java Tue Sep 8 16:09:28 2009
@@ -162,8 +162,8 @@
* {@inheritDoc}
*/
protected synchronized void doClose() throws IOException {
- for (int i = 0; i < subReaders.length; i++) {
- subReaders[i].release();
+ for (ReadOnlyIndexReader subReader : subReaders) {
+ subReader.release();
}
}
@@ -185,12 +185,12 @@
Term term = new Term(FieldNames.UUID, id.toString());
int doc;
long tick;
- for (int i = 0; i < subReaders.length; i++) {
- TermDocs docs = subReaders[i].termDocs(term);
+ for (ReadOnlyIndexReader subReader : subReaders) {
+ TermDocs docs = subReader.termDocs(term);
try {
if (docs.next()) {
doc = docs.doc();
- tick = subReaders[i].getCreationTick();
+ tick = subReader.getCreationTick();
return new ForeignSegmentDocId(doc, tick);
}
} finally {
@@ -204,8 +204,7 @@
* {@inheritDoc}
*/
public int getDocumentNumber(ForeignSegmentDocId docId) {
- OffsetReader r = (OffsetReader) readersByCreationTick.get(
- new Long(docId.getCreationTick()));
+ OffsetReader r = readersByCreationTick.get(docId.getCreationTick());
if (r != null && !r.reader.isDeleted(docId.getDocNumber())) {
return r.offset + docId.getDocNumber();
}
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/CaseTermQuery.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/CaseTermQuery.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/CaseTermQuery.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/CaseTermQuery.java Tue Sep 8 16:09:28 2009
@@ -70,27 +70,22 @@
private final class CaseTermEnum extends FilteredTermEnum {
- private final int nameLength;
-
- private final OffsetCharSequence termText;
-
- private final OffsetCharSequence currentTerm;
-
CaseTermEnum(IndexReader reader) throws IOException {
// gather all terms that match
// keep them in order and remember the doc frequency as value
- final Map orderedTerms = new LinkedHashMap();
+ final Map<Term, Integer> orderedTerms =
+ new LinkedHashMap<Term, Integer>();
Term term = getTerm();
// there are always two range scans: one with an initial
// lower case character and another one with an initial upper case
// character
- List rangeScans = new ArrayList(2);
- nameLength = FieldNames.getNameLength(term.text());
+ List<RangeScan> rangeScans = new ArrayList<RangeScan>(2);
+ int nameLength = FieldNames.getNameLength(term.text());
String propName = term.text().substring(0, nameLength);
- this.termText = new OffsetCharSequence(nameLength, term.text());
- this.currentTerm = new OffsetCharSequence(nameLength, term.text(), transform);
+ OffsetCharSequence termText = new OffsetCharSequence(nameLength, term.text());
+ OffsetCharSequence currentTerm = new OffsetCharSequence(nameLength, term.text(), transform);
try {
// start with a term using the lower case character for the first
@@ -125,16 +120,14 @@
rangeScans.add(new RangeScan(reader, term, term));
}
- Iterator it = rangeScans.iterator();
- while (it.hasNext()) {
- TermEnum terms = (TermEnum) it.next();
+ for (TermEnum terms : rangeScans) {
do {
Term t = terms.term();
if (t != null) {
currentTerm.setBase(t.text());
int compare = currentTerm.compareTo(termText);
if (compare == 0) {
- orderedTerms.put(t, new Integer(terms.docFreq()));
+ orderedTerms.put(t, terms.docFreq());
} else if (compare < 0) {
// try next one
} else {
@@ -146,9 +139,7 @@
} while (terms.next());
}
} finally {
- Iterator it = rangeScans.iterator();
- while (it.hasNext()) {
- TermEnum terms = (TermEnum) it.next();
+ for (TermEnum terms : rangeScans) {
try {
terms.close();
} catch (IOException e) {
@@ -157,7 +148,7 @@
}
}
- final Iterator it = orderedTerms.keySet().iterator();
+ final Iterator<Term> it = orderedTerms.keySet().iterator();
setEnum(new TermEnum() {
@@ -177,8 +168,8 @@
}
public int docFreq() {
- Integer docFreq = (Integer) orderedTerms.get(current);
- return docFreq != null ? docFreq.intValue() : 0;
+ Integer docFreq = orderedTerms.get(current);
+ return docFreq != null ? docFreq : 0;
}
public void close() {
@@ -186,7 +177,7 @@
}
private void getNext() {
- current = it.hasNext() ? (Term) it.next() : null;
+ current = it.hasNext() ? it.next() : null;
}
});
}
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/ChildAxisQuery.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/ChildAxisQuery.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/ChildAxisQuery.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/ChildAxisQuery.java Tue Sep 8 16:09:28 2009
@@ -27,7 +27,6 @@
import org.apache.jackrabbit.core.state.ChildNodeEntry;
import org.apache.jackrabbit.spi.Name;
import org.apache.jackrabbit.spi.commons.query.LocationStepQueryNode;
-import org.apache.jackrabbit.uuid.UUID;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
@@ -44,7 +43,6 @@
import org.slf4j.LoggerFactory;
import java.io.IOException;
-import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.HashMap;
@@ -452,18 +450,18 @@
calc[0] = new SimpleChildrenCalculator(reader, hResolver);
contextScorer.score(new HitCollector() {
- private List docIds = new ArrayList();
+ private List<Integer> docIds = new ArrayList<Integer>();
public void collect(int doc, float score) {
calc[0].collectContextHit(doc);
if (docIds != null) {
- docIds.add(new Integer(doc));
+ docIds.add(doc);
if (docIds.size() > CONTEXT_SIZE_THRESHOLD) {
// switch
calc[0] = new HierarchyResolvingChildrenCalculator(
reader, hResolver);
- for (Iterator it = docIds.iterator(); it.hasNext(); ) {
- calc[0].collectContextHit(((Integer) it.next()).intValue());
+ for (int docId : docIds) {
+ calc[0].collectContextHit(docId);
}
// indicate that we switched
docIds = null;
@@ -487,19 +485,17 @@
if (nameTest == null) {
// only select this node if it is the child at
// specified position
+ List<ChildNodeEntry> childNodes = state.getChildNodeEntries();
if (position == LocationStepQueryNode.LAST) {
// only select last
- List childNodes = state.getChildNodeEntries();
if (childNodes.size() == 0
- || !((ChildNodeEntry) childNodes.get(childNodes.size() - 1))
- .getId().equals(id)) {
+ || !(childNodes.get(childNodes.size() - 1)).getId().equals(id)) {
return false;
}
} else {
- List childNodes = state.getChildNodeEntries();
if (position < 1
|| childNodes.size() < position
- || !((ChildNodeEntry) childNodes.get(position - 1)).getId().equals(id)) {
+ || !(childNodes.get(position - 1)).getId().equals(id)) {
return false;
}
}
@@ -516,10 +512,9 @@
} else {
// only use the last one
Name name = entry.getName();
- List childNodes = state.getChildNodeEntries(name);
+ List<ChildNodeEntry> childNodes = state.getChildNodeEntries(name);
if (childNodes.size() == 0
- || !((ChildNodeEntry) childNodes.get(childNodes.size() - 1))
- .getId().equals(id)) {
+ || !(childNodes.get(childNodes.size() - 1)).getId().equals(id)) {
return false;
}
}
@@ -620,31 +615,30 @@
*/
public Hits getHits() throws IOException {
// read the uuids of the context nodes
- Map uuids = new HashMap();
+ Map<Integer, String> uuids = new HashMap<Integer, String>();
for (int i = contextHits.next(); i > -1; i = contextHits.next()) {
String uuid = reader.document(i, FieldSelectors.UUID).get(FieldNames.UUID);
- uuids.put(new Integer(i), uuid);
+ uuids.put(i, uuid);
}
// get child node entries for each hit
Hits childrenHits = new AdaptingHits();
- for (Iterator it = uuids.values().iterator(); it.hasNext(); ) {
- String uuid = (String) it.next();
+ for (String uuid : uuids.values()) {
NodeId id = new NodeId(uuid);
try {
long time = System.currentTimeMillis();
NodeState state = (NodeState) itemMgr.getItemState(id);
time = System.currentTimeMillis() - time;
- log.debug("got NodeState with id {} in {} ms.", id, new Long(time));
- Iterator entries;
+ log.debug("got NodeState with id {} in {} ms.", id, time);
+ List<ChildNodeEntry> entries;
if (nameTest != null) {
- entries = state.getChildNodeEntries(nameTest).iterator();
+ entries = state.getChildNodeEntries(nameTest);
} else {
// get all children
- entries = state.getChildNodeEntries().iterator();
+ entries = state.getChildNodeEntries();
}
- while (entries.hasNext()) {
- NodeId childId = ((ChildNodeEntry) entries.next()).getId();
+ for (ChildNodeEntry entry : entries) {
+ NodeId childId = entry.getId();
Term uuidTerm = new Term(FieldNames.UUID, childId.toString());
TermDocs docs = reader.termDocs(uuidTerm);
try {
@@ -674,7 +668,7 @@
/**
* The document numbers of the context hits.
*/
- private final Set docIds = new HashSet();
+ private final Set<Integer> docIds = new HashSet<Integer>();
/**
* Creates a new hierarchy resolving children calculator.
@@ -691,7 +685,7 @@
* {@inheritDoc}
*/
protected void collectContextHit(int doc) {
- docIds.add(new Integer(doc));
+ docIds.add(doc);
}
/**
@@ -706,12 +700,12 @@
docs = hResolver.getParents(h, docs);
if (docs.length == 1) {
// optimize single value
- if (docIds.contains(new Integer(docs[0]))) {
+ if (docIds.contains(docs[0])) {
childrenHits.set(h);
}
} else {
for (int i = 0; i < docs.length; i++) {
- if (docIds.contains(new Integer(docs[i]))) {
+ if (docIds.contains(docs[i])) {
childrenHits.set(h);
}
}
@@ -719,7 +713,7 @@
}
time = System.currentTimeMillis() - time;
- log.debug("Filtered hits in {} ms.", new Long(time));
+ log.debug("Filtered hits in {} ms.", time);
return childrenHits;
}
}
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/ChildNodesQueryHits.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/ChildNodesQueryHits.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/ChildNodesQueryHits.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/ChildNodesQueryHits.java Tue Sep 8 16:09:28 2009
@@ -87,7 +87,8 @@
/**
* Fetches the next {@link #childHits}
- * @throws IOException
+ *
+ * @throws IOException if an error occurs while reading from the index.
*/
private void fetchNextChildHits() throws IOException {
if (childHits != null) {
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/ConsistencyCheck.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/ConsistencyCheck.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/ConsistencyCheck.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/ConsistencyCheck.java Tue Sep 8 16:09:28 2009
@@ -44,7 +44,7 @@
* in the index for such a node are removed, and the node is re-indexed.</li>
* </ul>
*/
-class ConsistencyCheck {
+public class ConsistencyCheck {
/**
* Logger instance for this class
@@ -102,7 +102,7 @@
* repair failure.
* @throws IOException if a repair failure occurs.
*/
- void repair(boolean ignoreFailure) throws IOException {
+ public void repair(boolean ignoreFailure) throws IOException {
if (errors.size() == 0) {
log.info("No errors found.");
return;
@@ -137,7 +137,7 @@
* Returns the errors detected by the consistency check.
* @return the errors detected by the consistency check.
*/
- List<ConsistencyCheckError> getErrors() {
+ public List<ConsistencyCheckError> getErrors() {
return new ArrayList<ConsistencyCheckError>(errors);
}
@@ -233,7 +233,7 @@
node = parent;
}
for (int i = elements.size() - 1; i > -1; i--) {
- ChildNodeEntry entry = (ChildNodeEntry) elements.get(i);
+ ChildNodeEntry entry = elements.get(i);
path.append('/').append(entry.getName().getLocalName());
if (entry.getIndex() > 1) {
path.append('[').append(entry.getIndex()).append(']');
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/ConsistencyCheckError.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/ConsistencyCheckError.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/ConsistencyCheckError.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/ConsistencyCheckError.java Tue Sep 8 16:09:28 2009
@@ -23,7 +23,7 @@
/**
* Common base class for errors detected during the consistency check.
*/
-abstract class ConsistencyCheckError {
+public abstract class ConsistencyCheckError {
/**
* Diagnostic message for this error.
@@ -52,7 +52,7 @@
* Returns <code>true</code> if this error can be repaired.
* @return <code>true</code> if this error can be repaired.
*/
- abstract boolean repairable();
+ public abstract boolean repairable();
/**
* Executes the repair operation.
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/DefaultHighlighter.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/DefaultHighlighter.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/DefaultHighlighter.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/DefaultHighlighter.java Tue Sep 8 16:09:28 2009
@@ -23,6 +23,7 @@
import java.util.Set;
import java.util.Arrays;
import java.util.Collections;
+import java.util.List;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.TermPositionVector;
@@ -92,7 +93,7 @@
* highlighted
*/
public static String highlight(TermPositionVector tvec,
- Set queryTerms,
+ Set<Term> queryTerms,
String text,
String excerptStart,
String excerptEnd,
@@ -119,7 +120,7 @@
* highlighted
*/
public static String highlight(TermPositionVector tvec,
- Set queryTerms,
+ Set<Term> queryTerms,
String text,
int maxFragments,
int surround)
@@ -133,7 +134,7 @@
* @see #highlight(TermPositionVector, Set, String, String, String, String, String, String, String, int, int)
*/
protected String doHighlight(TermPositionVector tvec,
- Set queryTerms,
+ Set<Term> queryTerms,
String text,
String excerptStart,
String excerptEnd,
@@ -144,18 +145,18 @@
int maxFragments,
int surround) throws IOException {
String[] terms = new String[queryTerms.size()];
- Iterator it = queryTerms.iterator();
+ Iterator<Term> it = queryTerms.iterator();
for (int i = 0; it.hasNext(); i++) {
- terms[i] = ((Term) it.next()).text();
+ terms[i] = it.next().text();
}
- ArrayList list = new ArrayList();
+ List<TermVectorOffsetInfo> list = new ArrayList<TermVectorOffsetInfo>();
int[] tvecindexes = tvec.indexesOf(terms, 0, terms.length);
- for (int i = 0; i < tvecindexes.length; i++) {
- TermVectorOffsetInfo[] termoffsets = tvec.getOffsets(tvecindexes[i]);
+ for (int tvecindex : tvecindexes) {
+ TermVectorOffsetInfo[] termoffsets = tvec.getOffsets(tvecindex);
list.addAll(Arrays.asList(termoffsets));
}
- TermVectorOffsetInfo[] offsets = (TermVectorOffsetInfo[]) list.toArray(new TermVectorOffsetInfo[list.size()]);
+ TermVectorOffsetInfo[] offsets = list.toArray(new TermVectorOffsetInfo[list.size()]);
// sort offsets
if (terms.length > 1) {
Arrays.sort(offsets, new TermVectorOffsetInfoSorter());
@@ -182,7 +183,7 @@
fragmentStart, fragmentEnd, surround * 2);
}
int lastOffset = offsets.length; // Math.min(10, offsets.length); // 10 terms is plenty?
- ArrayList fragmentInfoList = new ArrayList();
+ List<FragmentInfo> fragmentInfoList = new ArrayList<FragmentInfo>();
if (offsets[0].getEndOffset() <= text.length()) {
FragmentInfo fi = new FragmentInfo(offsets[0], surround * 2);
for (int i = 1; i < lastOffset; i++) {
@@ -208,7 +209,7 @@
Collections.sort(fragmentInfoList, new FragmentInfoScoreSorter());
// extract best fragments
- ArrayList bestFragmentsList = new ArrayList();
+ List<FragmentInfo> bestFragmentsList = new ArrayList<FragmentInfo>();
for (int i = 0; i < Math.min(fragmentInfoList.size(), maxFragments); i++) {
bestFragmentsList.add(fragmentInfoList.get(i));
}
@@ -226,7 +227,7 @@
int skippedChars;
int firstWhitespace;
for (int i = 0; i < bestFragmentsList.size(); i++) {
- FragmentInfo fi = (FragmentInfo) bestFragmentsList.get(i);
+ FragmentInfo fi = bestFragmentsList.get(i);
fi.trim();
nextStart = fi.getStartOffset();
skip = nextStart - pos;
@@ -379,29 +380,31 @@
int min = excerpt.length();
char[] buf = new char[maxLength];
int len = reader.read(buf);
- excerpt.append(buf, 0, len);
+ StringBuffer tmp = new StringBuffer();
+ tmp.append(buf, 0, len);
if (len == buf.length) {
- for (int i = excerpt.length() - 1; i > min; i--) {
- if (Character.isWhitespace(excerpt.charAt(i))) {
- excerpt.delete(i, excerpt.length());
- excerpt.append(" ...");
+ for (int i = tmp.length() - 1; i > min; i--) {
+ if (Character.isWhitespace(tmp.charAt(i))) {
+ tmp.delete(i, tmp.length());
+ tmp.append(" ...");
break;
}
}
}
+ excerpt.append(Text.encodeIllegalXMLCharacters(tmp.toString()));
excerpt.append(fragmentEnd).append(excerptEnd);
return excerpt.toString();
}
private static class FragmentInfo {
- ArrayList offsetInfosList;
+ List<TermVectorOffsetInfo> offsetInfosList;
int startOffset;
int endOffset;
int mergeGap;
int numTerms;
public FragmentInfo(TermVectorOffsetInfo offsetinfo, int mergeGap) {
- offsetInfosList = new ArrayList();
+ offsetInfosList = new ArrayList<TermVectorOffsetInfo>();
offsetInfosList.add(offsetinfo);
startOffset = offsetinfo.getStartOffset();
endOffset = offsetinfo.getEndOffset();
@@ -437,9 +440,9 @@
public void trim() {
int end = startOffset + (mergeGap / 2);
- Iterator it = offsetInfosList.iterator();
+ Iterator<TermVectorOffsetInfo> it = offsetInfosList.iterator();
while (it.hasNext()) {
- TermVectorOffsetInfo tvoi = (TermVectorOffsetInfo) it.next();
+ TermVectorOffsetInfo tvoi = it.next();
if (tvoi.getStartOffset() > end) {
it.remove();
}
@@ -448,51 +451,42 @@
}
private static class FragmentInfoScoreSorter
- implements java.util.Comparator {
- public int compare(Object o1, Object o2) {
- int s1 = ((FragmentInfo) o1).numTerms();
- int s2 = ((FragmentInfo) o2).numTerms();
+ implements java.util.Comparator<FragmentInfo> {
+
+ public int compare(FragmentInfo o1, FragmentInfo o2) {
+ int s1 = o1.numTerms();
+ int s2 = o2.numTerms();
if (s1 == s2) {
- return ((FragmentInfo) o1).getStartOffset() < ((FragmentInfo) o2).getStartOffset() ? -1 : 1;
+ return o1.getStartOffset() < o2.getStartOffset() ? -1 : 1;
}
return s1 > s2 ? -1 : 1;
}
-
- public boolean equals(Object obj) {
- return false;
- }
}
private static class FragmentInfoPositionSorter
- implements java.util.Comparator {
- public int compare(Object o1, Object o2) {
- int s1 = ((FragmentInfo) o1).getStartOffset();
- int s2 = ((FragmentInfo) o2).getStartOffset();
+ implements java.util.Comparator<FragmentInfo> {
+
+ public int compare(FragmentInfo o1, FragmentInfo o2) {
+ int s1 = o1.getStartOffset();
+ int s2 = o2.getStartOffset();
if (s1 == s2) {
return 0;
}
return s1 < s2 ? -1 : 1;
}
-
- public boolean equals(Object obj) {
- return false;
- }
}
private static class TermVectorOffsetInfoSorter
- implements java.util.Comparator {
- public int compare(Object o1, Object o2) {
- int s1 = ((TermVectorOffsetInfo) o1).getStartOffset();
- int s2 = ((TermVectorOffsetInfo) o2).getStartOffset();
+ implements java.util.Comparator<TermVectorOffsetInfo> {
+
+ public int compare(TermVectorOffsetInfo o1, TermVectorOffsetInfo o2) {
+ int s1 = o1.getStartOffset();
+ int s2 = o2.getStartOffset();
if (s1 == s2) {
return 0;
}
return s1 < s2 ? -1 : 1;
}
-
- public boolean equals(Object obj) {
- return false;
- }
}
}
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/DerefQuery.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/DerefQuery.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/DerefQuery.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/DerefQuery.java Tue Sep 8 16:09:28 2009
@@ -31,7 +31,6 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.BitSet;
-import java.util.Iterator;
import java.util.List;
import java.util.Set;
@@ -238,7 +237,7 @@
/**
* List of UUIDs of selected nodes
*/
- private List uuids = null;
+ private List<String> uuids = null;
/**
* The next document id to return
@@ -305,11 +304,13 @@
* 3. find reference property UUIDs
* 4. Use UUIDs to find document number
* 5. Use the name test to filter the documents
- * @throws IOException
+ *
+ * @throws IOException if an exception occurs while reading from the
+ * index.
*/
private void calculateChildren() throws IOException {
if (uuids == null) {
- uuids = new ArrayList();
+ uuids = new ArrayList<String>();
contextScorer.score(new HitCollector() {
public void collect(int doc, float score) {
hits.set(doc);
@@ -334,9 +335,9 @@
// no reference properties at all on this node
continue;
}
- for (int v = 0; v < values.length; v++) {
- if (values[v].startsWith(prefix)) {
- uuids.add(values[v].substring(prefix.length()));
+ for (String value : values) {
+ if (value.startsWith(prefix)) {
+ uuids.add(value.substring(prefix.length()));
}
}
}
@@ -344,8 +345,8 @@
// collect the doc ids of all target nodes. we reuse the existing
// bitset.
hits.clear();
- for (Iterator it = uuids.iterator(); it.hasNext();) {
- TermDocs node = reader.termDocs(new Term(FieldNames.UUID, (String) it.next()));
+ for (String uuid : uuids) {
+ TermDocs node = reader.termDocs(new Term(FieldNames.UUID, uuid));
try {
while (node.next()) {
hits.set(node.doc());
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/DescendantSelfAxisQuery.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/DescendantSelfAxisQuery.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/DescendantSelfAxisQuery.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/DescendantSelfAxisQuery.java Tue Sep 8 16:09:28 2009
@@ -26,7 +26,6 @@
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.Sort;
import org.apache.jackrabbit.core.SessionImpl;
-import org.apache.jackrabbit.core.ItemManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -228,8 +227,8 @@
final SessionImpl session,
final Sort sort) throws IOException {
if (sort.getSort().length == 0 && subQueryMatchesAll()) {
- // maps path String to NodeId
- Map startingPoints = new TreeMap();
+ // maps path String to ScoreNode
+ Map<String, ScoreNode> startingPoints = new TreeMap<String, ScoreNode>();
QueryHits result = searcher.evaluate(getContextQuery());
try {
// minLevels 0 and 1 are handled with a series of
@@ -254,8 +253,8 @@
// prune overlapping starting points
String previousPath = null;
- for (Iterator it = startingPoints.keySet().iterator(); it.hasNext(); ) {
- String path = (String) it.next();
+ for (Iterator<String> it = startingPoints.keySet().iterator(); it.hasNext(); ) {
+ String path = it.next();
// if the previous path is a prefix of this path then the
// current path is obsolete
if (previousPath != null && path.startsWith(previousPath)) {
@@ -265,13 +264,11 @@
}
}
- final Iterator scoreNodes = startingPoints.values().iterator();
+ final Iterator<ScoreNode> scoreNodes = startingPoints.values().iterator();
return new AbstractQueryHits() {
private NodeTraversingQueryHits currentTraversal;
- private ItemManager itemMgr = session.getItemManager();
-
{
fetchNextTraversal();
}
@@ -300,7 +297,7 @@
currentTraversal.close();
}
if (scoreNodes.hasNext()) {
- ScoreNode sn = (ScoreNode) scoreNodes.next();
+ ScoreNode sn = scoreNodes.next();
try {
Node node = session.getNodeById(sn.getNodeId());
currentTraversal = new NodeTraversingQueryHits(node,
@@ -508,8 +505,8 @@
if (log.isDebugEnabled()) {
log.debug("Collected {} context hits in {} ms for {}",
new Object[]{
- new Integer(contextHits.cardinality()),
- new Long(time),
+ contextHits.cardinality(),
+ time,
DescendantSelfAxisQuery.this
});
}
@@ -553,8 +550,8 @@
// traverse
while (pDocs.length != 0) {
boolean valid = false;
- for (int i = 0; i < pDocs.length; i++) {
- if (ancestorCount >= minLevels && contextHits.get(pDocs[i])) {
+ for (int pDoc : pDocs) {
+ if (ancestorCount >= minLevels && contextHits.get(pDoc)) {
valid = true;
break;
}
@@ -604,8 +601,8 @@
return hResolver.getParents(docs[0], pDocs);
} else {
pDocs = new int[0];
- for (int i = 0; i < docs.length; i++) {
- int[] p = hResolver.getParents(docs[i], new int[0]);
+ for (int doc : docs) {
+ int[] p = hResolver.getParents(doc, new int[0]);
int[] tmp = new int[p.length + pDocs.length];
System.arraycopy(pDocs, 0, tmp, 0, pDocs.length);
System.arraycopy(p, 0, tmp, pDocs.length, p.length);
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/DocId.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/DocId.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/DocId.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/DocId.java Tue Sep 8 16:09:28 2009
@@ -344,10 +344,10 @@
public String toString() {
StringBuffer sb = new StringBuffer("MultiUUIDDocId(");
String separator = "";
- for (int i = 0; i < docIds.length; i++) {
+ for (UUIDDocId docId : docIds) {
sb.append(separator);
separator = ", ";
- sb.append(docIds[i].id);
+ sb.append(docId.id);
}
sb.append(")");
return sb.toString();
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/DocNumberCache.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/DocNumberCache.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/DocNumberCache.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/DocNumberCache.java Tue Sep 8 16:09:28 2009
@@ -17,7 +17,6 @@
package org.apache.jackrabbit.core.query.lucene;
import org.apache.commons.collections.map.LRUMap;
-import org.apache.jackrabbit.uuid.UUID;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -94,9 +93,8 @@
*/
void put(String uuid, CachingIndexReader reader, int n) {
LRUMap cacheSegment = docNumbers[getSegmentIndex(uuid.charAt(0))];
- UUID key = UUID.fromString(uuid);
synchronized (cacheSegment) {
- Entry e = (Entry) cacheSegment.get(key);
+ Entry e = (Entry) cacheSegment.get(uuid);
if (e != null) {
// existing entry
// ignore if reader is older than the one in entry
@@ -114,7 +112,7 @@
}
if (e != null) {
- cacheSegment.put(key, e);
+ cacheSegment.put(uuid, e);
}
}
}
@@ -128,14 +126,9 @@
*/
Entry get(String uuid) {
LRUMap cacheSegment = docNumbers[getSegmentIndex(uuid.charAt(0))];
- // uuid may be invalid
- if (uuid.length() != UUID.UUID_FORMATTED_LENGTH) {
- return null;
- }
- UUID key = UUID.fromString(uuid);
Entry entry;
synchronized (cacheSegment) {
- entry = (Entry) cacheSegment.get(key);
+ entry = (Entry) cacheSegment.get(uuid);
}
if (log.isInfoEnabled()) {
accesses++;
@@ -150,8 +143,8 @@
}
StringBuffer statistics = new StringBuffer();
int inUse = 0;
- for (int i = 0; i < docNumbers.length; i++) {
- inUse += docNumbers[i].size();
+ for (LRUMap docNumber : docNumbers) {
+ inUse += docNumber.size();
}
statistics.append("size=").append(inUse);
statistics.append("/").append(docNumbers[0].maxSize() * CACHE_SEGMENTS);
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/DocOrderScoreNodeIterator.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/DocOrderScoreNodeIterator.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/DocOrderScoreNodeIterator.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/DocOrderScoreNodeIterator.java Tue Sep 8 16:09:28 2009
@@ -18,6 +18,7 @@
import org.apache.jackrabbit.core.ItemManager;
import org.apache.jackrabbit.core.NodeImpl;
+import org.apache.jackrabbit.core.id.NodeId;
import org.apache.jackrabbit.spi.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -42,7 +43,7 @@
private ScoreNodeIterator orderedNodes;
/** Unordered list of {@link ScoreNode}[]s. */
- private final List scoreNodes;
+ private final List<ScoreNode[]> scoreNodes;
/** ItemManager to turn UUIDs into Node instances */
protected final ItemManager itemMgr;
@@ -64,8 +65,8 @@
* selectorIndex.
*/
DocOrderScoreNodeIterator(ItemManager itemMgr,
- List scoreNodes,
- int selectorIndex) {
+ List<ScoreNode[]> scoreNodes,
+ int selectorIndex) {
this.itemMgr = itemMgr;
this.scoreNodes = scoreNodes;
this.selectorIndex = selectorIndex;
@@ -147,29 +148,29 @@
return;
}
long time = System.currentTimeMillis();
- ScoreNode[][] nodes = (ScoreNode[][]) scoreNodes.toArray(new ScoreNode[scoreNodes.size()][]);
+ ScoreNode[][] nodes = scoreNodes.toArray(new ScoreNode[scoreNodes.size()][]);
- final List invalidIDs = new ArrayList(2);
+ final List<NodeId> invalidIDs = new ArrayList<NodeId>(2);
do {
if (invalidIDs.size() > 0) {
// previous sort run was not successful -> remove failed uuids
- List tmp = new ArrayList();
- for (int i = 0; i < nodes.length; i++) {
- if (!invalidIDs.contains(nodes[i][selectorIndex].getNodeId())) {
- tmp.add(nodes[i]);
+ List<ScoreNode[]> tmp = new ArrayList<ScoreNode[]>();
+ for (ScoreNode[] node : nodes) {
+ if (!invalidIDs.contains(node[selectorIndex].getNodeId())) {
+ tmp.add(node);
}
}
- nodes = (ScoreNode[][]) tmp.toArray(new ScoreNode[tmp.size()][]);
+ nodes = tmp.toArray(new ScoreNode[tmp.size()][]);
invalidIDs.clear();
}
try {
// sort the uuids
- Arrays.sort(nodes, new Comparator() {
- public int compare(Object o1, Object o2) {
- ScoreNode n1 = ((ScoreNode[]) o1)[selectorIndex];
- ScoreNode n2 = ((ScoreNode[]) o2)[selectorIndex];
+ Arrays.sort(nodes, new Comparator<ScoreNode[]>() {
+ public int compare(ScoreNode[] o1, ScoreNode[] o2) {
+ ScoreNode n1 = o1[selectorIndex];
+ ScoreNode n2 = o2[selectorIndex];
// handle null values
// null is considered less than any value
if (n1 == n2) {
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/FileBasedNamespaceMappings.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/FileBasedNamespaceMappings.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/FileBasedNamespaceMappings.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/FileBasedNamespaceMappings.java Tue Sep 8 16:09:28 2009
@@ -29,7 +29,6 @@
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.util.HashMap;
-import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
@@ -58,12 +57,12 @@
/**
* Map of uris indexed by prefixes
*/
- private Map prefixToURI = new HashMap();
+ private Map<String, String> prefixToURI = new HashMap<String, String>();
/**
* Map of prefixes indexed by uris
*/
- private Map uriToPrefix = new HashMap();
+ private Map<String, String> uriToPrefix = new HashMap<String, String>();
/**
* Current prefix count.
@@ -95,7 +94,7 @@
if (!prefixToURI.containsKey(prefix)) {
throw new NamespaceException(prefix + ": is not a registered namespace prefix.");
}
- return (String) prefixToURI.get(prefix);
+ return prefixToURI.get(prefix);
}
/**
@@ -109,7 +108,7 @@
* mapping could not be stored.
*/
public synchronized String getPrefix(String uri) throws NamespaceException {
- String prefix = (String) uriToPrefix.get(uri);
+ String prefix = uriToPrefix.get(uri);
if (prefix == null) {
// make sure prefix is not taken
while (prefixToURI.get(String.valueOf(prefixCount)) != null) {
@@ -144,9 +143,8 @@
props.load(in);
// read mappings from properties
- Iterator iter = props.keySet().iterator();
- while (iter.hasNext()) {
- String prefix = (String) iter.next();
+ for (Object o : props.keySet()) {
+ String prefix = (String) o;
String uri = props.getProperty(prefix);
log.debug(prefix + " -> " + uri);
prefixToURI.put(prefix, uri);
@@ -169,12 +167,7 @@
Properties props = new Properties();
// store mappings in properties
- Iterator iter = prefixToURI.keySet().iterator();
- while (iter.hasNext()) {
- String prefix = (String) iter.next();
- String uri = (String) prefixToURI.get(prefix);
- props.setProperty(prefix, uri);
- }
+ props.putAll(prefixToURI);
OutputStream out = new FileOutputStream(storage);
try {