You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by ja...@apache.org on 2013/07/04 01:26:45 UTC
svn commit: r1499601 [14/20] - in /lucene/dev/branches/security: ./
dev-tools/ dev-tools/eclipse/ dev-tools/idea/.idea/libraries/
dev-tools/idea/solr/core/src/test/ dev-tools/maven/ dev-tools/maven/lucene/
dev-tools/maven/lucene/analysis/stempel/ dev-t...
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/DoubleField.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/DoubleField.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/DoubleField.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/DoubleField.java Wed Jul 3 23:26:32 2013
@@ -17,18 +17,20 @@
package org.apache.solr.schema;
+import java.io.IOException;
+import java.util.Map;
+
+import org.apache.lucene.index.StorableField;
+import org.apache.lucene.index.Terms;
+import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.DoubleFieldSource;
-import org.apache.lucene.index.GeneralField;
-import org.apache.lucene.index.IndexableField;
-import org.apache.lucene.index.StorableField;
+import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.SortField;
+import org.apache.lucene.util.BytesRef;
import org.apache.solr.response.TextResponseWriter;
import org.apache.solr.search.QParser;
-import java.io.IOException;
-import java.util.Map;
-
/**
* A legacy numeric field type that encodes "Double" values as simple Strings.
* This class should not be used except by people with existing indexes that
@@ -43,7 +45,21 @@ import java.util.Map;
*
* @see TrieDoubleField
*/
-public class DoubleField extends PrimitiveFieldType {
+public class DoubleField extends PrimitiveFieldType implements DoubleValueFieldType {
+
+ private static final FieldCache.DoubleParser PARSER = new FieldCache.DoubleParser() {
+
+ @Override
+ public TermsEnum termsEnum(Terms terms) throws IOException {
+ return terms.iterator(null);
+ }
+
+ @Override
+ public double parseDouble(BytesRef term) {
+ return Double.parseDouble(term.utf8ToString());
+ }
+ };
+
@Override
protected void init(IndexSchema schema, Map<String, String> args) {
super.init(schema, args);
@@ -54,13 +70,13 @@ public class DoubleField extends Primiti
@Override
public SortField getSortField(SchemaField field, boolean reverse) {
field.checkSortability();
- return new SortField(field.name, SortField.Type.DOUBLE, reverse);
+ return new SortField(field.name, PARSER, reverse);
}
@Override
public ValueSource getValueSource(SchemaField field, QParser qparser) {
field.checkFieldCacheSource(qparser);
- return new DoubleFieldSource(field.name);
+ return new DoubleFieldSource(field.name, PARSER);
}
@Override
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/FieldType.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/FieldType.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/FieldType.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/FieldType.java Wed Jul 3 23:26:32 2013
@@ -17,18 +17,6 @@
package org.apache.solr.schema;
-import java.io.IOException;
-import java.io.Reader;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import static org.apache.lucene.analysis.util.AbstractAnalysisFactory.LUCENE_MATCH_VERSION_PARAM;
-
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
@@ -67,6 +55,18 @@ import org.apache.solr.search.Sorting;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.io.IOException;
+import java.io.Reader;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import static org.apache.lucene.analysis.util.AbstractAnalysisFactory.LUCENE_MATCH_VERSION_PARAM;
+
/**
* Base class for all field types used by an index schema.
*
@@ -799,6 +799,15 @@ public abstract class FieldType extends
namedPropertyValues.add(getPropertyName(TOKENIZED), isTokenized());
// The BINARY property is always false
// namedPropertyValues.add(getPropertyName(BINARY), hasProperty(BINARY));
+ if (null != getSimilarityFactory()) {
+ namedPropertyValues.add(SIMILARITY, getSimilarityFactory().getNamedPropertyValues());
+ }
+ if (null != getPostingsFormat()) {
+ namedPropertyValues.add(POSTINGS_FORMAT, getPostingsFormat());
+ }
+ if (null != getDocValuesFormat()) {
+ namedPropertyValues.add(DOC_VALUES_FORMAT, getDocValuesFormat());
+ }
} else { // Don't show defaults
Set<String> fieldProperties = new HashSet<String>();
for (String propertyName : FieldProperties.propertyNames) {
@@ -826,15 +835,7 @@ public abstract class FieldType extends
namedPropertyValues.add(MULTI_TERM_ANALYZER, getAnalyzerProperties(((TextField) this).getMultiTermAnalyzer()));
}
}
- if (null != getSimilarityFactory()) {
- namedPropertyValues.add(SIMILARITY, getSimilarityFactory().getNamedPropertyValues());
- }
- if (null != getPostingsFormat()) {
- namedPropertyValues.add(POSTINGS_FORMAT, getPostingsFormat());
- }
- if (null != getDocValuesFormat()) {
- namedPropertyValues.add(DOC_VALUES_FORMAT, getDocValuesFormat());
- }
+
return namedPropertyValues;
}
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/FloatField.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/FloatField.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/FloatField.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/FloatField.java Wed Jul 3 23:26:32 2013
@@ -19,11 +19,15 @@ package org.apache.solr.schema;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.FloatFieldSource;
+import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.SortField;
+import org.apache.lucene.util.BytesRef;
import org.apache.solr.search.QParser;
import org.apache.lucene.index.GeneralField;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StorableField;
+import org.apache.lucene.index.Terms;
+import org.apache.lucene.index.TermsEnum;
import org.apache.solr.response.TextResponseWriter;
import java.util.Map;
@@ -42,7 +46,21 @@ import java.io.IOException;
*
* @see TrieFloatField
*/
-public class FloatField extends PrimitiveFieldType {
+public class FloatField extends PrimitiveFieldType implements FloatValueFieldType {
+
+ private static final FieldCache.FloatParser PARSER = new FieldCache.FloatParser() {
+
+ @Override
+ public TermsEnum termsEnum(Terms terms) throws IOException {
+ return terms.iterator(null);
+ }
+
+ @Override
+ public float parseFloat(BytesRef term) {
+ return Float.parseFloat(term.utf8ToString());
+ }
+ };
+
@Override
protected void init(IndexSchema schema, Map<String,String> args) {
super.init(schema, args);
@@ -58,7 +76,7 @@ public class FloatField extends Primitiv
@Override
public ValueSource getValueSource(SchemaField field, QParser qparser) {
field.checkFieldCacheSource(qparser);
- return new FloatFieldSource(field.name);
+ return new FloatFieldSource(field.name, PARSER);
}
@Override
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/IndexSchema.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/IndexSchema.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/IndexSchema.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/IndexSchema.java Wed Jul 3 23:26:32 2013
@@ -311,8 +311,8 @@ public class IndexSchema {
*/
public SchemaField getUniqueKeyField() { return uniqueKeyField; }
- private String uniqueKeyFieldName;
- private FieldType uniqueKeyFieldType;
+ protected String uniqueKeyFieldName;
+ protected FieldType uniqueKeyFieldType;
/**
* The raw (field type encoded) value of the Unique Key field for
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/IntField.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/IntField.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/IntField.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/IntField.java Wed Jul 3 23:26:32 2013
@@ -19,11 +19,15 @@ package org.apache.solr.schema;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.IntFieldSource;
+import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.SortField;
+import org.apache.lucene.util.BytesRef;
import org.apache.solr.search.QParser;
import org.apache.lucene.index.GeneralField;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StorableField;
+import org.apache.lucene.index.Terms;
+import org.apache.lucene.index.TermsEnum;
import org.apache.solr.response.TextResponseWriter;
import java.util.Map;
@@ -42,7 +46,21 @@ import java.io.IOException;
*
* @see TrieIntField
*/
-public class IntField extends PrimitiveFieldType {
+public class IntField extends PrimitiveFieldType implements IntValueFieldType {
+
+ private static final FieldCache.IntParser PARSER = new FieldCache.IntParser() {
+
+ @Override
+ public TermsEnum termsEnum(Terms terms) throws IOException {
+ return terms.iterator(null);
+ }
+
+ @Override
+ public int parseInt(BytesRef term) {
+ return Integer.parseInt(term.utf8ToString());
+ }
+ };
+
@Override
protected void init(IndexSchema schema, Map<String,String> args) {
super.init(schema, args);
@@ -52,13 +70,13 @@ public class IntField extends PrimitiveF
@Override
public SortField getSortField(SchemaField field,boolean reverse) {
field.checkSortability();
- return new SortField(field.name,SortField.Type.INT, reverse);
+ return new SortField(field.name, PARSER, reverse);
}
@Override
public ValueSource getValueSource(SchemaField field, QParser qparser) {
field.checkFieldCacheSource(qparser);
- return new IntFieldSource(field.name);
+ return new IntFieldSource(field.name, PARSER);
}
@Override
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/LongField.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/LongField.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/LongField.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/LongField.java Wed Jul 3 23:26:32 2013
@@ -22,7 +22,11 @@ import org.apache.lucene.queries.functio
import org.apache.lucene.index.GeneralField;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StorableField;
+import org.apache.lucene.index.Terms;
+import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.SortField;
+import org.apache.lucene.util.BytesRef;
import org.apache.solr.response.TextResponseWriter;
import org.apache.solr.search.QParser;
@@ -42,7 +46,21 @@ import java.util.Map;
*
* @see TrieLongField
*/
-public class LongField extends PrimitiveFieldType {
+public class LongField extends PrimitiveFieldType implements LongValueFieldType {
+
+ private static final FieldCache.LongParser PARSER = new FieldCache.LongParser() {
+
+ @Override
+ public TermsEnum termsEnum(Terms terms) throws IOException {
+ return terms.iterator(null);
+ }
+
+ @Override
+ public long parseLong(BytesRef term) {
+ return Long.parseLong(term.utf8ToString());
+ }
+ };
+
@Override
protected void init(IndexSchema schema, Map<String,String> args) {
super.init(schema, args);
@@ -54,13 +72,13 @@ public class LongField extends Primitive
@Override
public SortField getSortField(SchemaField field,boolean reverse) {
field.checkSortability();
- return new SortField(field.name,SortField.Type.LONG, reverse);
+ return new SortField(field.name, PARSER, reverse);
}
@Override
public ValueSource getValueSource(SchemaField field, QParser qparser) {
field.checkFieldCacheSource(qparser);
- return new LongFieldSource(field.name);
+ return new LongFieldSource(field.name, PARSER);
}
@Override
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java Wed Jul 3 23:26:32 2013
@@ -169,6 +169,12 @@ public final class ManagedIndexSchema ex
return addFields(Arrays.asList(newField));
}
+ public class FieldExistsException extends SolrException {
+ public FieldExistsException(ErrorCode code, String msg) {
+ super(code, msg);
+ }
+ }
+
@Override
public ManagedIndexSchema addFields(Collection<SchemaField> newFields) {
ManagedIndexSchema newSchema = null;
@@ -183,7 +189,7 @@ public final class ManagedIndexSchema ex
for (SchemaField newField : newFields) {
if (null != newSchema.getFieldOrNull(newField.getName())) {
String msg = "Field '" + newField.getName() + "' already exists.";
- throw new SolrException(ErrorCode.BAD_REQUEST, msg);
+ throw new FieldExistsException(ErrorCode.BAD_REQUEST, msg);
}
newSchema.fields.put(newField.getName(), newField);
@@ -328,6 +334,8 @@ public final class ManagedIndexSchema ex
newSchema.similarityFactory = similarityFactory;
newSchema.isExplicitSimilarity = isExplicitSimilarity;
newSchema.uniqueKeyField = uniqueKeyField;
+ newSchema.uniqueKeyFieldName = uniqueKeyFieldName;
+ newSchema.uniqueKeyFieldType = uniqueKeyFieldType;
if (includeFieldDataStructures) {
// These need new collections, since addFields() can add members to them
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/SortableDoubleField.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/SortableDoubleField.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/SortableDoubleField.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/SortableDoubleField.java Wed Jul 3 23:26:32 2013
@@ -54,7 +54,7 @@ import java.io.IOException;
* @deprecated use {@link DoubleField} or {@link TrieDoubleField} - will be removed in 5.x
*/
@Deprecated
-public class SortableDoubleField extends PrimitiveFieldType {
+public class SortableDoubleField extends PrimitiveFieldType implements DoubleValueFieldType {
@Override
public SortField getSortField(SchemaField field,boolean reverse) {
return getStringSort(field,reverse);
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/SortableFloatField.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/SortableFloatField.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/SortableFloatField.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/SortableFloatField.java Wed Jul 3 23:26:32 2013
@@ -55,7 +55,7 @@ import java.io.IOException;
* @deprecated use {@link FloatField} or {@link TrieFloatField} - will be removed in 5.x
*/
@Deprecated
-public class SortableFloatField extends PrimitiveFieldType {
+public class SortableFloatField extends PrimitiveFieldType implements FloatValueFieldType {
@Override
public SortField getSortField(SchemaField field,boolean reverse) {
return getStringSort(field,reverse);
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/SortableIntField.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/SortableIntField.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/SortableIntField.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/SortableIntField.java Wed Jul 3 23:26:32 2013
@@ -55,7 +55,7 @@ import java.io.IOException;
* @deprecated use {@link IntField} or {@link TrieIntField} - will be removed in 5.x
*/
@Deprecated
-public class SortableIntField extends PrimitiveFieldType {
+public class SortableIntField extends PrimitiveFieldType implements IntValueFieldType {
@Override
public SortField getSortField(SchemaField field,boolean reverse) {
return getStringSort(field,reverse);
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/TrieDateField.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/TrieDateField.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/TrieDateField.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/TrieDateField.java Wed Jul 3 23:26:32 2013
@@ -55,7 +55,7 @@ import java.io.IOException;
* @see DateField
* @see TrieField
*/
-public class TrieDateField extends DateField {
+public class TrieDateField extends DateField implements DateValueFieldType {
final TrieField wrappedField = new TrieField() {{
type = TrieTypes.DATE;
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/TrieDoubleField.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/TrieDoubleField.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/TrieDoubleField.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/TrieDoubleField.java Wed Jul 3 23:26:32 2013
@@ -33,7 +33,7 @@ package org.apache.solr.schema;
* @see Double
* @see <a href="http://java.sun.com/docs/books/jls/third_edition/html/typesValues.html#4.2.3">Java Language Specification, s4.2.3</a>
*/
-public class TrieDoubleField extends TrieField {
+public class TrieDoubleField extends TrieField implements DoubleValueFieldType {
{
type=TrieTypes.DOUBLE;
}
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/TrieFloatField.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/TrieFloatField.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/TrieFloatField.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/TrieFloatField.java Wed Jul 3 23:26:32 2013
@@ -33,7 +33,7 @@ package org.apache.solr.schema;
* @see Float
* @see <a href="http://java.sun.com/docs/books/jls/third_edition/html/typesValues.html#4.2.3">Java Language Specification, s4.2.3</a>
*/
-public class TrieFloatField extends TrieField {
+public class TrieFloatField extends TrieField implements FloatValueFieldType {
{
type=TrieTypes.FLOAT;
}
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/TrieIntField.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/TrieIntField.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/TrieIntField.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/TrieIntField.java Wed Jul 3 23:26:32 2013
@@ -27,7 +27,7 @@ package org.apache.solr.schema;
*
* @see Integer
*/
-public class TrieIntField extends TrieField {
+public class TrieIntField extends TrieField implements IntValueFieldType {
{
type=TrieTypes.INTEGER;
}
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/TrieLongField.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/TrieLongField.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/TrieLongField.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/schema/TrieLongField.java Wed Jul 3 23:26:32 2013
@@ -27,7 +27,7 @@ package org.apache.solr.schema;
*
* @see Long
*/
-public class TrieLongField extends TrieField {
+public class TrieLongField extends TrieField implements LongValueFieldType {
{
type=TrieTypes.LONG;
}
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java Wed Jul 3 23:26:32 2013
@@ -193,7 +193,13 @@ public class SolrIndexSearcher extends I
this.name = "Searcher@" + Integer.toHexString(hashCode()) + (name!=null ? " "+name : "");
log.info("Opening " + this.name);
- Directory dir = this.reader.directory();
+ if (directoryFactory.searchersReserveCommitPoints()) {
+ // reserve commit point for life of searcher
+ core.getDeletionPolicy().saveCommitPoint(
+ reader.getIndexCommit().getGeneration());
+ }
+
+ Directory dir = getIndexReader().directory();
this.reserveDirectory = reserveDirectory;
this.createdDirectory = r == null;
@@ -331,12 +337,18 @@ public class SolrIndexSearcher extends I
// super.close();
// can't use super.close() since it just calls reader.close() and that may only be called once
// per reader (even if incRef() was previously called).
+
+ long cpg = reader.getIndexCommit().getGeneration();
try {
if (closeReader) reader.decRef();
} catch (Throwable t) {
SolrException.log(log, "Problem dec ref'ing reader", t);
}
+ if (directoryFactory.searchersReserveCommitPoints()) {
+ core.getDeletionPolicy().releaseCommitPoint(cpg);
+ }
+
for (SolrCache cache : cacheList) {
cache.close();
}
@@ -1095,41 +1107,12 @@ public class SolrIndexSearcher extends I
DocSetCollector collector = new DocSetCollector(maxDoc()>>6, maxDoc());
if (filter==null) {
- if (query instanceof TermQuery) {
- Term t = ((TermQuery)query).getTerm();
- for (final AtomicReaderContext leaf : leafContexts) {
- final AtomicReader reader = leaf.reader();
- collector.setNextReader(leaf);
- Fields fields = reader.fields();
- Terms terms = fields.terms(t.field());
- BytesRef termBytes = t.bytes();
-
- Bits liveDocs = reader.getLiveDocs();
- DocsEnum docsEnum = null;
- if (terms != null) {
- final TermsEnum termsEnum = terms.iterator(null);
- if (termsEnum.seekExact(termBytes, false)) {
- docsEnum = termsEnum.docs(liveDocs, null, DocsEnum.FLAG_NONE);
- }
- }
-
- if (docsEnum != null) {
- int docid;
- while ((docid = docsEnum.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
- collector.collect(docid);
- }
- }
- }
- } else {
- super.search(query,null,collector);
- }
- return collector.getDocSet();
-
+ super.search(query,null,collector);
} else {
Filter luceneFilter = filter.getTopFilter();
super.search(query, luceneFilter, collector);
- return collector.getDocSet();
}
+ return collector.getDocSet();
}
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java Wed Jul 3 23:26:32 2013
@@ -293,7 +293,7 @@ public abstract class ValueSourceParser
}
});
- addParser("geodist", HaversineConstFunction.parser);
+ addParser("geodist", new GeoDistValueSourceParser());
addParser("hsin", new ValueSourceParser() {
@Override
@@ -309,18 +309,8 @@ public abstract class ValueSourceParser
ValueSource one = fp.parseValueSource();
ValueSource two = fp.parseValueSource();
if (fp.hasMoreArguments()) {
-
-
- List<ValueSource> s1 = new ArrayList<ValueSource>();
- s1.add(one);
- s1.add(two);
- pv1 = new VectorValueSource(s1);
- ValueSource x2 = fp.parseValueSource();
- ValueSource y2 = fp.parseValueSource();
- List<ValueSource> s2 = new ArrayList<ValueSource>();
- s2.add(x2);
- s2.add(y2);
- pv2 = new VectorValueSource(s2);
+ pv1 = new VectorValueSource(Arrays.asList(one, two));//x1, y1
+ pv2 = new VectorValueSource(Arrays.asList(fp.parseValueSource(), fp.parseValueSource()));//x2, y2
} else {
//check to see if we have multiValue source
if (one instanceof MultiValueSource && two instanceof MultiValueSource){
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/search/function/distance/HaversineConstFunction.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/search/function/distance/HaversineConstFunction.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/search/function/distance/HaversineConstFunction.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/search/function/distance/HaversineConstFunction.java Wed Jul 3 23:26:32 2013
@@ -16,27 +16,15 @@ package org.apache.solr.search.function.
* limitations under the License.
*/
+import com.spatial4j.core.distance.DistanceUtils;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
-import org.apache.lucene.queries.function.valuesource.ConstNumberSource;
-import org.apache.lucene.queries.function.valuesource.DoubleConstValueSource;
-import org.apache.lucene.queries.function.valuesource.MultiValueSource;
import org.apache.lucene.queries.function.valuesource.VectorValueSource;
import org.apache.lucene.search.IndexSearcher;
-import com.spatial4j.core.io.ParseUtils;
-import com.spatial4j.core.distance.DistanceUtils;
-import com.spatial4j.core.exception.InvalidShapeException;
-import org.apache.solr.common.params.SpatialParams;
-import org.apache.solr.schema.SchemaField;
-import org.apache.solr.search.FunctionQParser;
-import org.apache.solr.search.SyntaxError;
-import org.apache.solr.search.ValueSourceParser;
import java.io.IOException;
-import java.util.Arrays;
-import java.util.List;
import java.util.Map;
import static com.spatial4j.core.distance.DistanceUtils.DEGREES_TO_RADIANS;
@@ -46,135 +34,6 @@ import static com.spatial4j.core.distanc
*/
public class HaversineConstFunction extends ValueSource {
- public static ValueSourceParser parser = new ValueSourceParser() {
- @Override
- public ValueSource parse(FunctionQParser fp) throws SyntaxError
- {
- // TODO: dispatch through SpatialQueryable in the future?
- List<ValueSource> sources = fp.parseValueSourceList();
-
- // "m" is a multi-value source, "x" is a single-value source
- // allow (m,m) (m,x,x) (x,x,m) (x,x,x,x)
- // if not enough points are present, "pt" will be checked first, followed by "sfield".
-
- MultiValueSource mv1 = null;
- MultiValueSource mv2 = null;
-
- if (sources.size() == 0) {
- // nothing to do now
- } else if (sources.size() == 1) {
- ValueSource vs = sources.get(0);
- if (!(vs instanceof MultiValueSource)) {
- throw new SyntaxError("geodist - invalid parameters:" + sources);
- }
- mv1 = (MultiValueSource)vs;
- } else if (sources.size() == 2) {
- ValueSource vs1 = sources.get(0);
- ValueSource vs2 = sources.get(1);
-
- if (vs1 instanceof MultiValueSource && vs2 instanceof MultiValueSource) {
- mv1 = (MultiValueSource)vs1;
- mv2 = (MultiValueSource)vs2;
- } else {
- mv1 = makeMV(sources, sources);
- }
- } else if (sources.size()==3) {
- ValueSource vs1 = sources.get(0);
- ValueSource vs2 = sources.get(1);
- if (vs1 instanceof MultiValueSource) { // (m,x,x)
- mv1 = (MultiValueSource)vs1;
- mv2 = makeMV(sources.subList(1,3), sources);
- } else { // (x,x,m)
- mv1 = makeMV(sources.subList(0,2), sources);
- vs1 = sources.get(2);
- if (!(vs1 instanceof MultiValueSource)) {
- throw new SyntaxError("geodist - invalid parameters:" + sources);
- }
- mv2 = (MultiValueSource)vs1;
- }
- } else if (sources.size()==4) {
- mv1 = makeMV(sources.subList(0,2), sources);
- mv2 = makeMV(sources.subList(2,4), sources);
- } else if (sources.size() > 4) {
- throw new SyntaxError("geodist - invalid parameters:" + sources);
- }
-
- if (mv1 == null) {
- mv1 = parsePoint(fp);
- mv2 = parseSfield(fp);
- } else if (mv2 == null) {
- mv2 = parsePoint(fp);
- if (mv2 == null)
- mv2 = parseSfield(fp);
- }
-
- if (mv1 == null || mv2 == null) {
- throw new SyntaxError("geodist - not enough parameters:" + sources);
- }
-
- // We have all the parameters at this point, now check if one of the points is constant
- double[] constants;
- constants = getConstants(mv1);
- MultiValueSource other = mv2;
- if (constants == null) {
- constants = getConstants(mv2);
- other = mv1;
- }
-
- if (constants != null && other instanceof VectorValueSource) {
- return new HaversineConstFunction(constants[0], constants[1], (VectorValueSource)other);
- }
-
- return new HaversineFunction(mv1, mv2, DistanceUtils.EARTH_MEAN_RADIUS_KM, true);
- }
- };
-
- /** make a MultiValueSource from two non MultiValueSources */
- private static VectorValueSource makeMV(List<ValueSource> sources, List<ValueSource> orig) throws SyntaxError {
- ValueSource vs1 = sources.get(0);
- ValueSource vs2 = sources.get(1);
-
- if (vs1 instanceof MultiValueSource || vs2 instanceof MultiValueSource) {
- throw new SyntaxError("geodist - invalid parameters:" + orig);
- }
- return new VectorValueSource(sources);
- }
-
- private static MultiValueSource parsePoint(FunctionQParser fp) throws SyntaxError {
- String pt = fp.getParam(SpatialParams.POINT);
- if (pt == null) return null;
- double[] point = null;
- try {
- point = ParseUtils.parseLatitudeLongitude(pt);
- } catch (InvalidShapeException e) {
- throw new SyntaxError("Bad spatial pt:" + pt);
- }
- return new VectorValueSource(Arrays.<ValueSource>asList(new DoubleConstValueSource(point[0]),new DoubleConstValueSource(point[1])));
- }
-
- private static double[] getConstants(MultiValueSource vs) {
- if (!(vs instanceof VectorValueSource)) return null;
- List<ValueSource> sources = ((VectorValueSource)vs).getSources();
- if (sources.get(0) instanceof ConstNumberSource && sources.get(1) instanceof ConstNumberSource) {
- return new double[] { ((ConstNumberSource) sources.get(0)).getDouble(), ((ConstNumberSource) sources.get(1)).getDouble()};
- }
- return null;
- }
-
- private static MultiValueSource parseSfield(FunctionQParser fp) throws SyntaxError {
- String sfield = fp.getParam(SpatialParams.FIELD);
- if (sfield == null) return null;
- SchemaField sf = fp.getReq().getSchema().getField(sfield);
- ValueSource vs = sf.getType().getValueSource(sf, fp);
- if (!(vs instanceof MultiValueSource)) {
- throw new SyntaxError("Spatial field must implement MultiValueSource:" + sf);
- }
- return (MultiValueSource)vs;
- }
-
-
- //////////////////////////////////////////////////////////////////////////////////////
-
private final double latCenter;
private final double lonCenter;
private final VectorValueSource p2; // lat+lon, just saved for display/debugging
@@ -184,7 +43,6 @@ public class HaversineConstFunction exte
private final double latCenterRad_cos; // cos(latCenter)
private static final double EARTH_MEAN_DIAMETER = DistanceUtils.EARTH_MEAN_RADIUS_KM * 2;
-
public HaversineConstFunction(double latCenter, double lonCenter, VectorValueSource vs) {
this.latCenter = latCenter;
this.lonCenter = lonCenter;
@@ -257,4 +115,5 @@ public class HaversineConstFunction exte
public String description() {
return name() + '(' + p2 + ',' + latCenter + ',' + lonCenter + ')';
}
+
}
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/SearchGroupShardResponseProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/SearchGroupShardResponseProcessor.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/SearchGroupShardResponseProcessor.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/SearchGroupShardResponseProcessor.java Wed Jul 3 23:26:32 2013
@@ -20,8 +20,11 @@ package org.apache.solr.search.grouping.
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.grouping.SearchGroup;
import org.apache.lucene.util.BytesRef;
+import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.common.SolrException;
+import org.apache.solr.common.params.ShardParams;
import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.handler.component.ResponseBuilder;
import org.apache.solr.handler.component.ShardRequest;
import org.apache.solr.handler.component.ShardResponse;
@@ -31,6 +34,8 @@ import org.apache.solr.search.grouping.d
import org.apache.solr.search.grouping.distributed.shardresultserializer.SearchGroupsResultTransformer;
import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.StringWriter;
import java.util.*;
/**
@@ -61,7 +66,38 @@ public class SearchGroupShardResponsePro
try {
int maxElapsedTime = 0;
int hitCountDuringFirstPhase = 0;
+
+ NamedList<Object> shardInfo = null;
+ if (rb.req.getParams().getBool(ShardParams.SHARDS_INFO, false)) {
+ shardInfo = new SimpleOrderedMap<Object>();
+ rb.rsp.getValues().add(ShardParams.SHARDS_INFO + ".firstPhase", shardInfo);
+ }
+
for (ShardResponse srsp : shardRequest.responses) {
+ if (shardInfo != null) {
+ SimpleOrderedMap<Object> nl = new SimpleOrderedMap<Object>();
+
+ if (srsp.getException() != null) {
+ Throwable t = srsp.getException();
+ if (t instanceof SolrServerException) {
+ t = ((SolrServerException) t).getCause();
+ }
+ nl.add("error", t.toString());
+ StringWriter trace = new StringWriter();
+ t.printStackTrace(new PrintWriter(trace));
+ nl.add("trace", trace.toString());
+ } else {
+ nl.add("numFound", (Integer) srsp.getSolrResponse().getResponse().get("totalHitCount"));
+ }
+ if (srsp.getSolrResponse() != null) {
+ nl.add("time", srsp.getSolrResponse().getElapsedTime());
+ }
+
+ shardInfo.add(srsp.getShard(), nl);
+ }
+ if (rb.req.getParams().getBool(ShardParams.SHARDS_TOLERANT, false) && srsp.getException() != null) {
+ continue; // continue if there was an error and we're tolerant.
+ }
maxElapsedTime = (int) Math.max(maxElapsedTime, srsp.getSolrResponse().getElapsedTime());
@SuppressWarnings("unchecked")
NamedList<NamedList> firstPhaseResult = (NamedList<NamedList>) srsp.getSolrResponse().getResponse().get("firstPhase");
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java Wed Jul 3 23:26:32 2013
@@ -23,8 +23,11 @@ import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.grouping.GroupDocs;
import org.apache.lucene.search.grouping.TopGroups;
import org.apache.lucene.util.BytesRef;
+import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.common.SolrException;
+import org.apache.solr.common.params.ShardParams;
import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.handler.component.ResponseBuilder;
import org.apache.solr.handler.component.ShardDoc;
import org.apache.solr.handler.component.ShardRequest;
@@ -35,6 +38,8 @@ import org.apache.solr.search.grouping.d
import org.apache.solr.search.grouping.distributed.shardresultserializer.TopGroupsResultTransformer;
import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.StringWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -76,18 +81,66 @@ public class TopGroupsShardResponseProce
}
TopGroupsResultTransformer serializer = new TopGroupsResultTransformer(rb);
+
+ NamedList<Object> shardInfo = null;
+ if (rb.req.getParams().getBool(ShardParams.SHARDS_INFO, false)) {
+ shardInfo = new SimpleOrderedMap<Object>();
+ rb.rsp.getValues().add(ShardParams.SHARDS_INFO, shardInfo);
+ }
+
for (ShardResponse srsp : shardRequest.responses) {
+ SimpleOrderedMap<Object> individualShardInfo = null;
+ if (shardInfo != null) {
+ individualShardInfo = new SimpleOrderedMap<Object>();
+
+ if (srsp.getException() != null) {
+ Throwable t = srsp.getException();
+ if (t instanceof SolrServerException) {
+ t = ((SolrServerException) t).getCause();
+ }
+ individualShardInfo.add("error", t.toString());
+ StringWriter trace = new StringWriter();
+ t.printStackTrace(new PrintWriter(trace));
+ individualShardInfo.add("trace", trace.toString());
+ } else {
+ // summary for successful shard response is added down below
+ }
+ if (srsp.getSolrResponse() != null) {
+ individualShardInfo.add("time", srsp.getSolrResponse().getElapsedTime());
+ }
+
+ shardInfo.add(srsp.getShard(), individualShardInfo);
+ }
+ if (rb.req.getParams().getBool(ShardParams.SHARDS_TOLERANT, false) && srsp.getException() != null) {
+ continue; // continue if there was an error and we're tolerant.
+ }
NamedList<NamedList> secondPhaseResult = (NamedList<NamedList>) srsp.getSolrResponse().getResponse().get("secondPhase");
Map<String, ?> result = serializer.transformToNative(secondPhaseResult, groupSort, sortWithinGroup, srsp.getShard());
+ int numFound = 0;
+ float maxScore = Float.NaN;
for (String field : commandTopGroups.keySet()) {
TopGroups<BytesRef> topGroups = (TopGroups<BytesRef>) result.get(field);
if (topGroups == null) {
continue;
}
+ if (individualShardInfo != null) { // keep track of this when shards.info=true
+ numFound += topGroups.totalHitCount;
+ if (Float.isNaN(maxScore) || topGroups.maxScore > maxScore) maxScore = topGroups.maxScore;
+ }
commandTopGroups.get(field).add(topGroups);
}
for (String query : queries) {
- commandTopDocs.get(query).add((QueryCommandResult) result.get(query));
+ QueryCommandResult queryCommandResult = (QueryCommandResult) result.get(query);
+ if (individualShardInfo != null) { // keep track of this when shards.info=true
+ numFound += queryCommandResult.getMatches();
+ float thisMax = queryCommandResult.getTopDocs().getMaxScore();
+ if (Float.isNaN(maxScore) || thisMax > maxScore) maxScore = thisMax;
+ }
+ commandTopDocs.get(query).add(queryCommandResult);
+ }
+ if (individualShardInfo != null) { // when shards.info=true
+ individualShardInfo.add("numFound", numFound);
+ individualShardInfo.add("maxScore", maxScore);
}
}
try {
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/search/similarities/SweetSpotSimilarityFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/search/similarities/SweetSpotSimilarityFactory.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/search/similarities/SweetSpotSimilarityFactory.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/search/similarities/SweetSpotSimilarityFactory.java Wed Jul 3 23:26:32 2013
@@ -180,7 +180,7 @@ public class SweetSpotSimilarityFactory
private static final class HyperbolicSweetSpotSimilarity
extends SweetSpotSimilarity {
@Override
- public float tf(int freq) {
+ public float tf(float freq) {
return hyperbolicTf(freq);
}
};
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java Wed Jul 3 23:26:32 2013
@@ -17,35 +17,6 @@
package org.apache.solr.servlet;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.io.Writer;
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.nio.charset.Charset;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Enumeration;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.WeakHashMap;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
import org.apache.commons.io.IOUtils;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
@@ -81,6 +52,34 @@ import org.apache.solr.util.FastWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Enumeration;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.WeakHashMap;
+
/**
* This filter looks at the incoming URL maps them to handlers defined in solrconfig.xml
*
@@ -116,12 +115,11 @@ public class SolrDispatchFilter implemen
{
log.info("SolrDispatchFilter.init()");
- CoreContainer.Initializer init = createInitializer();
try {
// web.xml configuration
this.pathPrefix = config.getInitParameter( "path-prefix" );
- this.cores = init.initialize();
+ this.cores = createCoreContainer();
log.info("user.dir=" + System.getProperty("user.dir"));
}
catch( Throwable t ) {
@@ -132,15 +130,20 @@ public class SolrDispatchFilter implemen
log.info("SolrDispatchFilter.init() done");
}
+
+ /**
+ * Override this to change CoreContainer initialization
+ * @return a CoreContainer to hold this server's cores
+ */
+ protected CoreContainer createCoreContainer() {
+ CoreContainer cores = new CoreContainer();
+ cores.load();
+ return cores;
+ }
public CoreContainer getCores() {
return cores;
}
-
- /** Method to override to change how CoreContainer initialization is performed. */
- protected CoreContainer.Initializer createInitializer() {
- return new CoreContainer.Initializer();
- }
@Override
public void destroy() {
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java Wed Jul 3 23:26:32 2013
@@ -187,6 +187,76 @@ public final class DefaultSolrCoreState
}
}
}
+
+ @Override
+ public synchronized void closeIndexWriter(SolrCore core, boolean rollback)
+ throws IOException {
+ log.info("Closing IndexWriter...");
+ String coreName = core.getName();
+ synchronized (writerPauseLock) {
+ if (closed) {
+ throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE, "Already closed");
+ }
+
+ // we need to wait for the Writer to fall out of use
+ // first lets stop it from being lent out
+ pauseWriter = true;
+ // then lets wait until its out of use
+ log.info("Waiting until IndexWriter is unused... core=" + coreName);
+
+ while (!writerFree) {
+ try {
+ writerPauseLock.wait(100);
+ } catch (InterruptedException e) {}
+
+ if (closed) {
+ throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE,
+ "SolrCoreState already closed");
+ }
+ }
+
+ if (indexWriter != null) {
+ if (!rollback) {
+ try {
+ log.info("Closing old IndexWriter... core=" + coreName);
+ indexWriter.close();
+ } catch (Throwable t) {
+ SolrException.log(log, "Error closing old IndexWriter. core="
+ + coreName, t);
+ }
+ } else {
+ try {
+ log.info("Rollback old IndexWriter... core=" + coreName);
+ indexWriter.rollback();
+ } catch (Throwable t) {
+ SolrException.log(log, "Error rolling back old IndexWriter. core="
+ + coreName, t);
+ }
+ }
+ }
+
+ }
+ }
+
+ @Override
+ public synchronized void openIndexWriter(SolrCore core) throws IOException {
+ log.info("Creating new IndexWriter...");
+ synchronized (writerPauseLock) {
+ if (closed) {
+ throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE, "Already closed");
+ }
+
+ try {
+ indexWriter = createMainIndexWriter(core, "DirectUpdateHandler2");
+ log.info("New IndexWriter is ready to be used.");
+ // we need to null this so it picks up the new writer next get call
+ refCntWriter = null;
+ } finally {
+ pauseWriter = false;
+ writerPauseLock.notifyAll();
+ }
+ }
+ }
@Override
public synchronized void rollbackIndexWriter(SolrCore core) throws IOException {
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java Wed Jul 3 23:26:32 2013
@@ -108,6 +108,8 @@ public class DirectUpdateHandler2 extend
softCommitTracker = new CommitTracker("Soft", core, softCommitDocsUpperBound, softCommitTimeUpperBound, true, true);
commitWithinSoftCommit = updateHandlerInfo.commitWithinSoftCommit;
+
+
}
public DirectUpdateHandler2(SolrCore core, UpdateHandler updateHandler) {
@@ -125,6 +127,13 @@ public class DirectUpdateHandler2 extend
softCommitTracker = new CommitTracker("Soft", core, softCommitDocsUpperBound, softCommitTimeUpperBound, updateHandlerInfo.openSearcher, true);
commitWithinSoftCommit = updateHandlerInfo.commitWithinSoftCommit;
+
+ UpdateLog existingLog = updateHandler.getUpdateLog();
+ if (this.ulog != null && this.ulog == existingLog) {
+      // If we are reusing the existing update log, inform the log that its update handler has changed.
+ // We do this as late as possible.
+ this.ulog.init(this, core);
+ }
}
private void deleteAll() throws IOException {
@@ -531,11 +540,17 @@ public class DirectUpdateHandler2 extend
}
// SolrCore.verbose("writer.commit() start writer=",writer);
- final Map<String,String> commitData = new HashMap<String,String>();
- commitData.put(SolrIndexWriter.COMMIT_TIME_MSEC_KEY,
- String.valueOf(System.currentTimeMillis()));
- writer.setCommitData(commitData);
- writer.commit();
+
+ if (writer.hasUncommittedChanges()) {
+ final Map<String,String> commitData = new HashMap<String,String>();
+ commitData.put(SolrIndexWriter.COMMIT_TIME_MSEC_KEY,
+ String.valueOf(System.currentTimeMillis()));
+ writer.setCommitData(commitData);
+ writer.commit();
+ } else {
+ log.info("No uncommitted changes. Skipping IW.commit.");
+ }
+
// SolrCore.verbose("writer.commit() end");
numDocsPending.set(0);
callPostCommitCallbacks();
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/SolrCmdDistributor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/SolrCmdDistributor.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/SolrCmdDistributor.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/SolrCmdDistributor.java Wed Jul 3 23:26:32 2013
@@ -114,11 +114,11 @@ public class SolrCmdDistributor {
// make sure any pending deletes are flushed
flushDeletes(1);
-
+
// TODO: this is brittle
// need to make a clone since these commands may be reused
AddUpdateCommand clone = new AddUpdateCommand(null);
-
+
clone.solrDoc = cmd.solrDoc;
clone.commitWithin = cmd.commitWithin;
clone.overwrite = cmd.overwrite;
@@ -135,10 +135,79 @@ public class SolrCmdDistributor {
}
alist.add(addRequest);
}
-
+
flushAdds(maxBufferedAddsPerServer);
}
-
+
+ /**
+ * Synchronous (blocking) add to specified node. Any error returned from node is propagated.
+ */
+ public void syncAdd(AddUpdateCommand cmd, Node node, ModifiableSolrParams params) throws IOException {
+ log.info("SYNCADD on {} : {}", node, cmd.getPrintableId());
+ checkResponses(false);
+ // flush all pending deletes
+ flushDeletes(1);
+ // flush all pending adds
+ flushAdds(1);
+ // finish with the pending requests
+ checkResponses(false);
+
+ UpdateRequestExt ureq = new UpdateRequestExt();
+ ureq.add(cmd.solrDoc, cmd.commitWithin, cmd.overwrite);
+ ureq.setParams(params);
+ syncRequest(node, ureq);
+ }
+
+ public void syncDelete(DeleteUpdateCommand cmd, List<Node> nodes, ModifiableSolrParams params) throws IOException {
+ log.info("SYNCDELETE on {} : ", nodes, cmd);
+ checkResponses(false);
+ // flush all pending adds
+ flushAdds(1);
+ // flush all pending deletes
+ flushDeletes(1);
+ // finish pending requests
+ checkResponses(false);
+
+ DeleteUpdateCommand clonedCmd = clone(cmd);
+ DeleteRequest deleteRequest = new DeleteRequest();
+ deleteRequest.cmd = clonedCmd;
+ deleteRequest.params = params;
+
+ UpdateRequestExt ureq = new UpdateRequestExt();
+ if (cmd.isDeleteById()) {
+ ureq.deleteById(cmd.getId(), cmd.getVersion());
+ } else {
+ ureq.deleteByQuery(cmd.query);
+ }
+ ureq.setParams(params);
+ for (Node node : nodes) {
+ syncRequest(node, ureq);
+ }
+ }
+
+ private void syncRequest(Node node, UpdateRequestExt ureq) {
+ Request sreq = new Request();
+ sreq.node = node;
+ sreq.ureq = ureq;
+
+ String url = node.getUrl();
+ String fullUrl;
+ if (!url.startsWith("http://") && !url.startsWith("https://")) {
+ fullUrl = "http://" + url;
+ } else {
+ fullUrl = url;
+ }
+
+ HttpSolrServer server = new HttpSolrServer(fullUrl,
+ updateShardHandler.getHttpClient());
+
+ try {
+ sreq.ursp = server.request(ureq);
+ } catch (Exception e) {
+ throw new SolrException(ErrorCode.SERVER_ERROR, "Failed synchronous update on shard " + sreq.node, sreq.exception);
+ }
+ }
+
public void distribCommit(CommitUpdateCommand cmd, List<Node> nodes,
ModifiableSolrParams params) throws IOException {
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/SolrCoreState.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/SolrCoreState.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/SolrCoreState.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/SolrCoreState.java Wed Jul 3 23:26:32 2013
@@ -88,6 +88,26 @@ public abstract class SolrCoreState {
*/
public abstract void newIndexWriter(SolrCore core, boolean rollback) throws IOException;
+
+ /**
+ * Expert method that closes the IndexWriter - you must call {@link #openIndexWriter(SolrCore)}
+ * in a finally block after calling this method.
+ *
+ * @param core that the IW belongs to
+ * @param rollback true if IW should rollback rather than close
+ * @throws IOException If there is a low-level I/O error.
+ */
+ public abstract void closeIndexWriter(SolrCore core, boolean rollback) throws IOException;
+
+ /**
+ * Expert method that opens the IndexWriter - you must call {@link #closeIndexWriter(SolrCore, boolean)}
+ * first, and then call this method in a finally block.
+ *
+ * @param core that the IW belongs to
+ * @throws IOException If there is a low-level I/O error.
+ */
+ public abstract void openIndexWriter(SolrCore core) throws IOException;
+
/**
* Get the current IndexWriter. If a new IndexWriter must be created, use the
* settings from the given {@link SolrCore}.
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java Wed Jul 3 23:26:32 2013
@@ -17,12 +17,15 @@
package org.apache.solr.update;
+import org.apache.commons.io.FileUtils;
import org.apache.lucene.index.*;
import org.apache.lucene.index.IndexWriter.IndexReaderWarmer;
import org.apache.lucene.util.InfoStream;
+import org.apache.lucene.util.PrintStreamInfoStream;
import org.apache.lucene.util.Version;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
+import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.schema.IndexSchema;
@@ -30,6 +33,10 @@ import org.apache.solr.util.SolrPluginUt
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.PrintStream;
import java.util.List;
/**
@@ -43,7 +50,14 @@ public class SolrIndexConfig {
public static final String DEFAULT_MERGE_SCHEDULER_CLASSNAME = ConcurrentMergeScheduler.class.getName();
public final Version luceneVersion;
+ /**
+ * The explicit value of <useCompoundFile> specified on this index config
+ * @deprecated use {@link #getUseCompoundFile}
+ */
+ @Deprecated
public final boolean useCompoundFile;
+ private boolean effectiveUseCompountFileSetting;
+
public final int maxBufferedDocs;
public final int maxMergeDocs;
public final int maxIndexingThreads;
@@ -59,7 +73,7 @@ public class SolrIndexConfig {
public final PluginInfo mergedSegmentWarmerInfo;
- public String infoStreamFile = null;
+ public InfoStream infoStream = InfoStream.NO_OUTPUT;
// Available lock types
public final static String LOCK_TYPE_SIMPLE = "simple";
@@ -73,7 +87,7 @@ public class SolrIndexConfig {
@SuppressWarnings("deprecation")
private SolrIndexConfig(SolrConfig solrConfig) {
luceneVersion = solrConfig.luceneMatchVersion;
- useCompoundFile = false;
+ useCompoundFile = effectiveUseCompountFileSetting = false;
maxBufferedDocs = -1;
maxMergeDocs = -1;
maxIndexingThreads = IndexWriterConfig.DEFAULT_MAX_THREAD_STATES;
@@ -121,6 +135,7 @@ public class SolrIndexConfig {
defaultMergePolicyClassName = def.defaultMergePolicyClassName;
useCompoundFile=solrConfig.getBool(prefix+"/useCompoundFile", def.useCompoundFile);
+ effectiveUseCompountFileSetting = useCompoundFile;
maxBufferedDocs=solrConfig.getInt(prefix+"/maxBufferedDocs",def.maxBufferedDocs);
maxMergeDocs=solrConfig.getInt(prefix+"/maxMergeDocs",def.maxMergeDocs);
maxIndexingThreads=solrConfig.getInt(prefix+"/maxIndexingThreads",def.maxIndexingThreads);
@@ -134,13 +149,17 @@ public class SolrIndexConfig {
mergePolicyInfo = getPluginInfo(prefix + "/mergePolicy", solrConfig, def.mergePolicyInfo);
termIndexInterval = solrConfig.getInt(prefix + "/termIndexInterval", def.termIndexInterval);
-
+
boolean infoStreamEnabled = solrConfig.getBool(prefix + "/infoStream", false);
if(infoStreamEnabled) {
- infoStreamFile= solrConfig.get(prefix + "/infoStream/@file", null);
- log.info("IndexWriter infoStream debug log is enabled: " + infoStreamFile);
+ String infoStreamFile = solrConfig.get(prefix + "/infoStream/@file", null);
+ if (infoStreamFile == null) {
+ log.info("IndexWriter infoStream solr logging is enabled");
+ infoStream = new LoggingInfoStream();
+ } else {
+ throw new IllegalArgumentException("Remove @file from <infoStream> to output messages to solr's logfile");
+ }
}
-
mergedSegmentWarmerInfo = getPluginInfo(prefix + "/mergedSegmentWarmer", solrConfig, def.mergedSegmentWarmerInfo);
if (mergedSegmentWarmerInfo != null && solrConfig.reopenReaders == false) {
throw new IllegalArgumentException("Supplying a mergedSegmentWarmer will do nothing since reopenReaders is false");
@@ -188,6 +207,11 @@ public class SolrIndexConfig {
iwc.setSimilarity(schema.getSimilarity());
iwc.setMergePolicy(buildMergePolicy(schema));
iwc.setMergeScheduler(buildMergeScheduler(schema));
+ iwc.setInfoStream(infoStream);
+
+ // do this after buildMergePolicy since the backcompat logic
+ // there may modify the effective useCompoundFile
+ iwc.setUseCompoundFile(getUseCompoundFile());
if (maxIndexingThreads != -1) {
iwc.setMaxThreadStates(maxIndexingThreads);
@@ -199,13 +223,22 @@ public class SolrIndexConfig {
IndexReaderWarmer.class,
null,
new Class[] { InfoStream.class },
- new Object[] { InfoStream.NO_OUTPUT });
+ new Object[] { iwc.getInfoStream() });
iwc.setMergedSegmentWarmer(warmer);
}
return iwc;
}
+ /**
+ * Builds a MergePolicy, may also modify the value returned by
+ * getUseCompoundFile() for use by the IndexWriterConfig if
+ * "useCompoundFile" is specified as an init arg for
+ * an out of the box MergePolicy that no longer supports it
+ *
+ * @see #fixUseCFMergePolicyInitArg
+ * @see #getUseCompoundFile
+ */
private MergePolicy buildMergePolicy(IndexSchema schema) {
String mpClassName = mergePolicyInfo == null ? defaultMergePolicyClassName : mergePolicyInfo.className;
@@ -213,25 +246,31 @@ public class SolrIndexConfig {
if (policy instanceof LogMergePolicy) {
LogMergePolicy logMergePolicy = (LogMergePolicy) policy;
+ fixUseCFMergePolicyInitArg(LogMergePolicy.class);
if (maxMergeDocs != -1)
logMergePolicy.setMaxMergeDocs(maxMergeDocs);
- logMergePolicy.setUseCompoundFile(useCompoundFile);
+ logMergePolicy.setNoCFSRatio(getUseCompoundFile() ? 1.0 : 0.0);
if (mergeFactor != -1)
logMergePolicy.setMergeFactor(mergeFactor);
+
+
} else if (policy instanceof TieredMergePolicy) {
TieredMergePolicy tieredMergePolicy = (TieredMergePolicy) policy;
+ fixUseCFMergePolicyInitArg(TieredMergePolicy.class);
- tieredMergePolicy.setUseCompoundFile(useCompoundFile);
+ tieredMergePolicy.setNoCFSRatio(getUseCompoundFile() ? 1.0 : 0.0);
if (mergeFactor != -1) {
tieredMergePolicy.setMaxMergeAtOnce(mergeFactor);
tieredMergePolicy.setSegmentsPerTier(mergeFactor);
}
- } else {
- log.warn("Use of compound file format or mergefactor cannot be configured if merge policy is not an instance of LogMergePolicy or TieredMergePolicy. The configured policy's defaults will be used.");
+
+
+ } else if (mergeFactor != -1) {
+ log.warn("Use of <mergeFactor> cannot be configured if merge policy is not an instance of LogMergePolicy or TieredMergePolicy. The configured policy's defaults will be used.");
}
if (mergePolicyInfo != null)
@@ -244,9 +283,58 @@ public class SolrIndexConfig {
String msClassName = mergeSchedulerInfo == null ? SolrIndexConfig.DEFAULT_MERGE_SCHEDULER_CLASSNAME : mergeSchedulerInfo.className;
MergeScheduler scheduler = schema.getResourceLoader().newInstance(msClassName, MergeScheduler.class);
- if (mergeSchedulerInfo != null)
- SolrPluginUtils.invokeSetters(scheduler, mergeSchedulerInfo.initArgs);
+ if (mergeSchedulerInfo != null) {
+ // LUCENE-5080: these two setters are removed, so we have to invoke setMaxMergesAndThreads
+ // if someone has them configured.
+ if (scheduler instanceof ConcurrentMergeScheduler) {
+ NamedList args = mergeSchedulerInfo.initArgs.clone();
+ Integer maxMergeCount = (Integer) args.remove("maxMergeCount");
+ if (maxMergeCount == null) {
+ maxMergeCount = ((ConcurrentMergeScheduler) scheduler).getMaxMergeCount();
+ }
+ Integer maxThreadCount = (Integer) args.remove("maxThreadCount");
+ if (maxThreadCount == null) {
+ maxThreadCount = ((ConcurrentMergeScheduler) scheduler).getMaxThreadCount();
+ }
+ ((ConcurrentMergeScheduler)scheduler).setMaxMergesAndThreads(maxMergeCount, maxThreadCount);
+ SolrPluginUtils.invokeSetters(scheduler, args);
+ } else {
+ SolrPluginUtils.invokeSetters(scheduler, mergeSchedulerInfo.initArgs);
+ }
+ }
return scheduler;
}
+
+ public boolean getUseCompoundFile() {
+ return effectiveUseCompountFileSetting;
+ }
+
+ /**
+ * Lucene 4.4 removed the setUseCompoundFile(boolean) method from the two
+   * concrete MergePolicies provided with Lucene/Solr and added it to the
+   * IndexWriterConfig.
+ * In the event that users have a value explicitly configured for this
+ * setter in their MergePolicy init args, we remove it from the MergePolicy
+ * init args, update the 'effective' useCompoundFile setting used by the
+ * IndexWriterConfig, and warn about discontinuing to use this init arg.
+ *
+ * @see #getUseCompoundFile
+ */
+ private void fixUseCFMergePolicyInitArg(Class c) {
+
+ if (null == mergePolicyInfo || null == mergePolicyInfo.initArgs) return;
+
+ Object useCFSArg = mergePolicyInfo.initArgs.remove("useCompoundFile");
+ if (null != useCFSArg) {
+ log.warn("Ignoring 'useCompoundFile' specified as an init arg for the <mergePolicy> since it is no directly longer supported by " + c.getSimpleName());
+ if (useCFSArg instanceof Boolean) {
+ boolean cfs = ((Boolean)useCFSArg).booleanValue();
+ log.warn("Please update your config to specify <useCompoundFile>"+cfs+"</useCompoundFile> directly in your <indexConfig> settings.");
+ effectiveUseCompountFileSetting = cfs;
+ } else {
+ log.error("MergePolicy's 'useCompoundFile' init arg is not a boolean, can not apply back compat logic to apply to the IndexWriterConfig: " + useCFSArg.toString());
+ }
+ }
+ }
}
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java Wed Jul 3 23:26:32 2013
@@ -77,7 +77,7 @@ public class SolrIndexWriter extends Ind
super(directory,
config.toIndexWriterConfig(schema).
setOpenMode(create ? IndexWriterConfig.OpenMode.CREATE : IndexWriterConfig.OpenMode.APPEND).
- setIndexDeletionPolicy(delPolicy).setCodec(codec).setInfoStream(toInfoStream(config))
+ setIndexDeletionPolicy(delPolicy).setCodec(codec)
);
log.debug("Opened Writer " + name);
this.name = name;
@@ -88,20 +88,6 @@ public class SolrIndexWriter extends Ind
this.directoryFactory = factory;
}
- private static InfoStream toInfoStream(SolrIndexConfig config) throws IOException {
- String infoStreamFile = config.infoStreamFile;
- if (infoStreamFile != null) {
- File f = new File(infoStreamFile);
- File parent = f.getParentFile();
- if (parent != null) parent.mkdirs();
- FileOutputStream fos = new FileOutputStream(f, true);
- return new PrintStreamInfoStream(new PrintStream(fos, true, "UTF-8"));
- } else {
- return InfoStream.NO_OUTPUT;
- }
- }
-
-
/**
* use DocumentBuilder now...
* private final void addField(Document doc, String name, String val) {
@@ -164,11 +150,8 @@ public class SolrIndexWriter extends Ind
if (infoStream != null) {
infoStream.close();
}
-
isClosed = true;
-
directoryFactory.release(directory);
-
numCloses.incrementAndGet();
}
}
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/TransactionLog.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/TransactionLog.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/TransactionLog.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/TransactionLog.java Wed Jul 3 23:26:32 2013
@@ -17,15 +17,6 @@
package org.apache.solr.update;
-import org.apache.lucene.util.BytesRef;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.SolrInputDocument;
-import org.apache.solr.common.util.FastInputStream;
-import org.apache.solr.common.util.FastOutputStream;
-import org.apache.solr.common.util.JavaBinCodec;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
@@ -34,15 +25,23 @@ import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
-import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
+import org.apache.lucene.util.BytesRef;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.util.DataInputInputStream;
+import org.apache.solr.common.util.FastInputStream;
+import org.apache.solr.common.util.FastOutputStream;
+import org.apache.solr.common.util.JavaBinCodec;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
/**
* Log Format: List{Operation, Version, ...}
* ADD, VERSION, DOC
@@ -75,7 +74,7 @@ public class TransactionLog {
FastOutputStream fos; // all accesses to this stream should be synchronized on "this" (The TransactionLog)
int numRecords;
- volatile boolean deleteOnClose = true; // we can delete old tlogs since they are currently only used for real-time-get (and in the future, recovery)
+ protected volatile boolean deleteOnClose = true; // we can delete old tlogs since they are currently only used for real-time-get (and in the future, recovery)
AtomicInteger refcount = new AtomicInteger(1);
Map<String,Integer> globalStringMap = new HashMap<String, Integer>();
@@ -98,7 +97,7 @@ public class TransactionLog {
};
public class LogCodec extends JavaBinCodec {
- public LogCodec() {
+ public LogCodec(JavaBinCodec.ObjectResolver resolver) {
super(resolver);
}
@@ -121,7 +120,7 @@ public class TransactionLog {
}
@Override
- public String readExternString(FastInputStream fis) throws IOException {
+ public String readExternString(DataInputInputStream fis) throws IOException {
int idx = readSize(fis);
if (idx != 0) {// idx != 0 is the index of the extern string
// no need to synchronize globalStringList - it's only updated before the first record is written to the log
@@ -191,6 +190,9 @@ public class TransactionLog {
}
}
+ // for subclasses
+ protected TransactionLog() {}
+
/** Returns the number of records in the log (currently includes the header and an optional commit).
* Note: currently returns 0 for reopened existing log files.
*/
@@ -245,7 +247,7 @@ public class TransactionLog {
public long writeData(Object o) {
- LogCodec codec = new LogCodec();
+ LogCodec codec = new LogCodec(resolver);
try {
long pos = fos.size(); // if we had flushed, this should be equal to channel.position()
codec.init(fos);
@@ -260,7 +262,7 @@ public class TransactionLog {
private void readHeader(FastInputStream fis) throws IOException {
// read existing header
fis = fis != null ? fis : new ChannelFastInputStream(channel, 0);
- LogCodec codec = new LogCodec();
+ LogCodec codec = new LogCodec(resolver);
Map header = (Map)codec.unmarshal(fis);
fis.readInt(); // skip size
@@ -276,7 +278,7 @@ public class TransactionLog {
}
}
- private void addGlobalStrings(Collection<String> strings) {
+ protected void addGlobalStrings(Collection<String> strings) {
if (strings == null) return;
int origSize = globalStringMap.size();
for (String s : strings) {
@@ -297,7 +299,7 @@ public class TransactionLog {
}
}
- private void writeLogHeader(LogCodec codec) throws IOException {
+ protected void writeLogHeader(LogCodec codec) throws IOException {
long pos = fos.size();
assert pos == 0;
@@ -309,7 +311,7 @@ public class TransactionLog {
endRecord(pos);
}
- private void endRecord(long startRecordPosition) throws IOException {
+ protected void endRecord(long startRecordPosition) throws IOException {
fos.writeInt((int)(fos.size() - startRecordPosition));
numRecords++;
}
@@ -333,7 +335,7 @@ public class TransactionLog {
int lastAddSize;
public long write(AddUpdateCommand cmd, int flags) {
- LogCodec codec = new LogCodec();
+ LogCodec codec = new LogCodec(resolver);
SolrInputDocument sdoc = cmd.getSolrInputDocument();
try {
@@ -375,7 +377,7 @@ public class TransactionLog {
}
public long writeDelete(DeleteUpdateCommand cmd, int flags) {
- LogCodec codec = new LogCodec();
+ LogCodec codec = new LogCodec(resolver);
try {
checkWriteHeader(codec, null);
@@ -405,7 +407,7 @@ public class TransactionLog {
}
public long writeDeleteByQuery(DeleteUpdateCommand cmd, int flags) {
- LogCodec codec = new LogCodec();
+ LogCodec codec = new LogCodec(resolver);
try {
checkWriteHeader(codec, null);
@@ -431,7 +433,7 @@ public class TransactionLog {
public long writeCommit(CommitUpdateCommand cmd, int flags) {
- LogCodec codec = new LogCodec();
+ LogCodec codec = new LogCodec(resolver);
synchronized (this) {
try {
long pos = fos.size(); // if we had flushed, this should be equal to channel.position()
@@ -479,7 +481,7 @@ public class TransactionLog {
}
ChannelFastInputStream fis = new ChannelFastInputStream(channel, pos);
- LogCodec codec = new LogCodec();
+ LogCodec codec = new LogCodec(resolver);
return codec.readVal(fis);
} catch (IOException e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
@@ -529,7 +531,7 @@ public class TransactionLog {
}
}
- private void close() {
+ protected void close() {
try {
if (debug) {
log.debug("Closing tlog" + this);
@@ -570,19 +572,22 @@ public class TransactionLog {
/** Returns a single threaded reverse reader */
public ReverseReader getReverseReader() throws IOException {
- return new ReverseReader();
+ return new FSReverseReader();
}
public class LogReader {
- ChannelFastInputStream fis;
- private LogCodec codec = new LogCodec();
+ private ChannelFastInputStream fis;
+ private LogCodec codec = new LogCodec(resolver);
public LogReader(long startingPos) {
incref();
fis = new ChannelFastInputStream(channel, startingPos);
}
+ // for classes that extend
+ protected LogReader() {}
+
/** Returns the next object from the log, or null if none available.
*
* @return The log record, or null if EOF
@@ -638,11 +643,32 @@ public class TransactionLog {
}
- public class ReverseReader {
+ public abstract class ReverseReader {
+
+
+
+ /** Returns the next object from the log, or null if none available.
+ *
+ * @return The log record, or null if EOF
+ * @throws IOException If there is a low-level I/O error.
+ */
+ public abstract Object next() throws IOException;
+
+ /* returns the position in the log file of the last record returned by next() */
+ public abstract long position();
+ public abstract void close();
+
+ @Override
+ public abstract String toString() ;
+
+
+ }
+
+ public class FSReverseReader extends ReverseReader {
ChannelFastInputStream fis;
- private LogCodec codec = new LogCodec() {
+ private LogCodec codec = new LogCodec(resolver) {
@Override
- public SolrInputDocument readSolrInputDocument(FastInputStream dis) {
+ public SolrInputDocument readSolrInputDocument(DataInputInputStream dis) {
// Given that the SolrInputDocument is last in an add record, it's OK to just skip
// reading it completely.
return null;
@@ -652,7 +678,7 @@ public class TransactionLog {
int nextLength; // length of the next record (the next one closer to the start of the log file)
long prevPos; // where we started reading from last time (so prevPos - nextLength == start of next record)
- public ReverseReader() throws IOException {
+ public FSReverseReader() throws IOException {
incref();
long sz;
Modified: lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/UpdateHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/UpdateHandler.java?rev=1499601&r1=1499600&r2=1499601&view=diff
==============================================================================
--- lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/UpdateHandler.java (original)
+++ lucene/dev/branches/security/solr/core/src/java/org/apache/solr/update/UpdateHandler.java Wed Jul 3 23:26:32 2013
@@ -18,10 +18,11 @@
package org.apache.solr.update;
-import java.io.File;
import java.io.IOException;
import java.util.Vector;
+import org.apache.solr.core.DirectoryFactory;
+import org.apache.solr.core.HdfsDirectoryFactory;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrEventListener;
@@ -52,7 +53,7 @@ public abstract class UpdateHandler impl
protected Vector<SolrEventListener> softCommitCallbacks = new Vector<SolrEventListener>();
protected Vector<SolrEventListener> optimizeCallbacks = new Vector<SolrEventListener>();
- protected volatile UpdateLog ulog;
+ protected final UpdateLog ulog;
private void parseEventListeners() {
final Class<SolrEventListener> clazz = SolrEventListener.class;
@@ -71,34 +72,6 @@ public abstract class UpdateHandler impl
}
}
-
- private void initLog(PluginInfo ulogPluginInfo) {
- if (ulogPluginInfo != null && ulogPluginInfo.isEnabled()) {
- ulog = new UpdateLog();
- ulog.init(ulogPluginInfo);
- // ulog = core.createInitInstance(ulogPluginInfo, UpdateLog.class, "update log", "solr.NullUpdateLog");
- ulog.init(this, core);
- }
- }
-
- // not thread safe - for startup
- private void clearLog(PluginInfo ulogPluginInfo) {
- if (ulogPluginInfo == null) return;
- File tlogDir = UpdateLog.getTlogDir(core, ulogPluginInfo);
- log.info("Clearing tlog files, tlogDir=" + tlogDir);
- if (tlogDir.exists()) {
- String[] files = UpdateLog.getLogList(tlogDir);
- for (String file : files) {
- File f = new File(tlogDir, file);
- boolean s = f.delete();
- if (!s) {
- log.error("Could not remove tlog file:" + f.getAbsolutePath());
- //throw new SolrException(ErrorCode.SERVER_ERROR, "Could not remove tlog file:" + f.getAbsolutePath());
- }
- }
- }
- }
-
protected void callPostCommitCallbacks() {
for (SolrEventListener listener : commitCallbacks) {
listener.postCommit();
@@ -127,14 +100,43 @@ public abstract class UpdateHandler impl
idFieldType = idField!=null ? idField.getType() : null;
parseEventListeners();
PluginInfo ulogPluginInfo = core.getSolrConfig().getPluginInfo(UpdateLog.class.getName());
- if (!core.isReloaded() && !core.getDirectoryFactory().isPersistent()) {
- clearLog(ulogPluginInfo);
- }
- if (updateLog == null) {
- initLog(ulogPluginInfo);
+
+
+ if (updateLog == null && ulogPluginInfo != null && ulogPluginInfo.isEnabled()) {
+ String dataDir = (String)ulogPluginInfo.initArgs.get("dir");
+
+ String ulogDir = core.getCoreDescriptor().getUlogDir();
+ if (ulogDir != null) {
+ dataDir = ulogDir;
+ }
+ if (dataDir == null || dataDir.length()==0) {
+ dataDir = core.getDataDir();
+ }
+
+ if (dataDir != null && dataDir.startsWith("hdfs:/")) {
+ DirectoryFactory dirFactory = core.getDirectoryFactory();
+ if (dirFactory instanceof HdfsDirectoryFactory) {
+ ulog = new HdfsUpdateLog(((HdfsDirectoryFactory)dirFactory).getConfDir());
+ } else {
+ ulog = new HdfsUpdateLog();
+ }
+
+ } else {
+ ulog = new UpdateLog();
+ }
+
+ if (!core.isReloaded() && !core.getDirectoryFactory().isPersistent()) {
+ ulog.clearLog(core, ulogPluginInfo);
+ }
+
+ ulog.init(ulogPluginInfo);
+
+ ulog.init(this, core);
} else {
- this.ulog = updateLog;
+ ulog = updateLog;
}
+ // ulog.init() when reusing an existing log is deferred (currently at the end of the DUH2 constructor
+
}
/**