You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by si...@apache.org on 2011/02/09 10:36:03 UTC
svn commit: r1068809 [26/36] - in /lucene/dev/branches/docvalues: ./
dev-tools/eclipse/ dev-tools/idea/.idea/ dev-tools/idea/.idea/copyright/
dev-tools/idea/lucene/ dev-tools/idea/lucene/contrib/ant/
dev-tools/idea/lucene/contrib/queryparser/ dev-tools...
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/PatternTokenizerFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/PatternTokenizerFactory.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/PatternTokenizerFactory.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/PatternTokenizerFactory.java Wed Feb 9 09:35:27 2011
@@ -19,13 +19,9 @@ package org.apache.solr.analysis;
import java.io.IOException;
import java.io.Reader;
-import java.util.ArrayList;
-import java.util.List;
import java.util.Map;
-import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.pattern.PatternTokenizer;
import org.apache.solr.common.SolrException;
@@ -104,65 +100,4 @@ public class PatternTokenizerFactory ext
throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, ex );
}
}
-
- /**
- * This behaves just like String.split( ), but returns a list of Tokens
- * rather than an array of strings
- * NOTE: This method is not used in 1.4.
- * @deprecated
- */
- @Deprecated
- public static List<Token> split( Matcher matcher, String input )
- {
- int index = 0;
- int lastNonEmptySize = Integer.MAX_VALUE;
- ArrayList<Token> matchList = new ArrayList<Token>();
-
- // Add segments before each match found
- while(matcher.find()) {
- String match = input.subSequence(index, matcher.start()).toString();
- matchList.add( new Token( match, index, matcher.start()) );
- index = matcher.end();
- if( match.length() > 0 ) {
- lastNonEmptySize = matchList.size();
- }
- }
-
- // If no match is found, return the full string
- if (index == 0) {
- matchList.add( new Token( input, 0, input.length()) );
- }
- else {
- String match = input.subSequence(index, input.length()).toString();
- matchList.add( new Token( match, index, input.length()) );
- if( match.length() > 0 ) {
- lastNonEmptySize = matchList.size();
- }
- }
-
- // Don't use trailing empty strings. This behavior matches String.split();
- if( lastNonEmptySize < matchList.size() ) {
- return matchList.subList( 0, lastNonEmptySize );
- }
- return matchList;
- }
-
- /**
- * Create tokens from the matches in a matcher
- * NOTE: This method is not used in 1.4.
- * @deprecated
- */
- @Deprecated
- public static List<Token> group( Matcher matcher, String input, int group )
- {
- ArrayList<Token> matchList = new ArrayList<Token>();
- while(matcher.find()) {
- Token t = new Token(
- matcher.group(group),
- matcher.start(group),
- matcher.end(group) );
- matchList.add( t );
- }
- return matchList;
- }
}
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/PositionFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/PositionFilterFactory.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/PositionFilterFactory.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/PositionFilterFactory.java Wed Feb 9 09:35:27 2011
@@ -33,6 +33,7 @@ import java.util.Map;
public class PositionFilterFactory extends BaseTokenFilterFactory {
private int positionIncrement;
+ @Override
public void init(Map<String, String> args) {
super.init(args);
positionIncrement = getInt("positionIncrement", 0);
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/RussianLetterTokenizerFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/RussianLetterTokenizerFactory.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/RussianLetterTokenizerFactory.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/RussianLetterTokenizerFactory.java Wed Feb 9 09:35:27 2011
@@ -37,10 +37,11 @@ public class RussianLetterTokenizerFacto
throw new SolrException(ErrorCode.SERVER_ERROR,
"The charset parameter is no longer supported. "
+ "Please process your documents as Unicode instead.");
+ assureMatchVersion();
+ warnDeprecated("Use StandardTokenizerFactory instead.");
}
public RussianLetterTokenizer create(Reader in) {
- assureMatchVersion();
return new RussianLetterTokenizer(luceneMatchVersion,in);
}
}
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/ShingleFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/ShingleFilterFactory.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/ShingleFilterFactory.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/ShingleFilterFactory.java Wed Feb 9 09:35:27 2011
@@ -34,6 +34,7 @@ public class ShingleFilterFactory extend
private boolean outputUnigramsIfNoShingles;
private String tokenSeparator;
+ @Override
public void init(Map<String, String> args) {
super.init(args);
maxShingleSize = getInt("maxShingleSize",
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/SolrAnalyzer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/SolrAnalyzer.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/SolrAnalyzer.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/SolrAnalyzer.java Wed Feb 9 09:35:27 2011
@@ -32,6 +32,7 @@ public abstract class SolrAnalyzer exten
posIncGap=gap;
}
+ @Override
public int getPositionIncrementGap(String fieldName) {
return posIncGap;
}
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/StopFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/StopFilterFactory.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/StopFilterFactory.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/StopFilterFactory.java Wed Feb 9 09:35:27 2011
@@ -29,6 +29,15 @@ import java.util.Set;
import java.io.IOException;
/**
+ * Factory for {@link StopFilter}.
+ * <pre class="prettyprint" >
+ * <fieldType name="text_stop" class="solr.TextField" positionIncrementGap="100" autoGeneratePhraseQueries="true">
+ * <analyzer>
+ * <tokenizer class="solr.WhitespaceTokenizerFactory"/>
+ * <filter class="solr.StopFilterFactory" ignoreCase="true"
+ * words="stopwords.txt" enablePositionIncrements="true"/>
+ * </analyzer>
+ * </fieldType></pre>
* @version $Id$
*/
public class StopFilterFactory extends BaseTokenFilterFactory implements ResourceLoaderAware {
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/SynonymFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/SynonymFilterFactory.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/SynonymFilterFactory.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/SynonymFilterFactory.java Wed Feb 9 09:35:27 2011
@@ -22,6 +22,7 @@ import org.apache.lucene.analysis.synony
import org.apache.lucene.analysis.synonym.SynonymMap;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.solr.common.ResourceLoader;
+import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.util.plugin.ResourceLoaderAware;
@@ -40,7 +41,8 @@ public class SynonymFilterFactory extend
public void inform(ResourceLoader loader) {
String synonyms = args.get("synonyms");
-
+ if (synonyms == null)
+ throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Missing required argument 'synonyms'.");
boolean ignoreCase = getBoolean("ignoreCase", false);
boolean expand = getBoolean("expand", true);
@@ -50,26 +52,24 @@ public class SynonymFilterFactory extend
tokFactory = loadTokenizerFactory( loader, tf, args );
}
- if (synonyms != null) {
- List<String> wlist=null;
- try {
- File synonymFile = new File(synonyms);
- if (synonymFile.exists()) {
- wlist = loader.getLines(synonyms);
- } else {
- List<String> files = StrUtils.splitFileNames(synonyms);
- wlist = new ArrayList<String>();
- for (String file : files) {
- List<String> lines = loader.getLines(file.trim());
- wlist.addAll(lines);
- }
+ List<String> wlist=null;
+ try {
+ File synonymFile = new File(synonyms);
+ if (synonymFile.exists()) {
+ wlist = loader.getLines(synonyms);
+ } else {
+ List<String> files = StrUtils.splitFileNames(synonyms);
+ wlist = new ArrayList<String>();
+ for (String file : files) {
+ List<String> lines = loader.getLines(file.trim());
+ wlist.addAll(lines);
}
- } catch (IOException e) {
- throw new RuntimeException(e);
}
- synMap = new SynonymMap(ignoreCase);
- parseRules(wlist, synMap, "=>", ",", expand,tokFactory);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
}
+ synMap = new SynonymMap(ignoreCase);
+ parseRules(wlist, synMap, "=>", ",", expand,tokFactory);
}
private SynonymMap synMap;
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/TokenizerChain.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/TokenizerChain.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/TokenizerChain.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/TokenizerChain.java Wed Feb 9 09:35:27 2011
@@ -73,6 +73,7 @@ public final class TokenizerChain extend
return new TokenStreamInfo(tk,ts);
}
+ @Override
public String toString() {
StringBuilder sb = new StringBuilder("TokenizerChain(");
for (CharFilterFactory filter: charFilters) {
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/TrieTokenizerFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/TrieTokenizerFactory.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/TrieTokenizerFactory.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/analysis/TrieTokenizerFactory.java Wed Feb 9 09:35:27 2011
@@ -17,6 +17,7 @@
package org.apache.solr.analysis;
import org.apache.lucene.analysis.NumericTokenStream;
+import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.solr.common.SolrException;
import org.apache.solr.schema.DateField;
@@ -56,6 +57,9 @@ final class TrieTokenizer extends Tokeni
protected final int precisionStep;
protected final TrieTypes type;
protected final NumericTokenStream ts;
+
+ protected final OffsetAttribute ofsAtt = addAttribute(OffsetAttribute.class);
+ protected int startOfs, endOfs;
static NumericTokenStream getNumericTokenStream(int precisionStep) {
return new NumericTokenStream(precisionStep);
@@ -82,6 +86,8 @@ final class TrieTokenizer extends Tokeni
input = super.input;
char[] buf = new char[32];
int len = input.read(buf);
+ this.startOfs = correctOffset(0);
+ this.endOfs = correctOffset(len);
String v = new String(buf, 0, len);
switch (type) {
case INTEGER:
@@ -105,13 +111,32 @@ final class TrieTokenizer extends Tokeni
} catch (IOException e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unable to create TrieIndexTokenizer", e);
}
+ }
+ @Override
+ public void close() throws IOException {
+ super.close();
+ ts.close();
+ }
+
+ @Override
+ public void reset() throws IOException {
+ super.reset();
ts.reset();
}
-
@Override
public boolean incrementToken() throws IOException {
- return ts.incrementToken();
+ if (ts.incrementToken()) {
+ ofsAtt.setOffset(startOfs, endOfs);
+ return true;
+ }
+ return false;
+ }
+
+ @Override
+ public void end() throws IOException {
+ ts.end();
+ ofsAtt.setOffset(endOfs, endOfs);
}
}
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/cloud/SolrZkServer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/cloud/SolrZkServer.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/cloud/SolrZkServer.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/cloud/SolrZkServer.java Wed Feb 9 09:35:27 2011
@@ -319,6 +319,7 @@ class SolrZkServerProps extends QuorumPe
* @throws java.io.IOException
* @throws ConfigException
*/
+ @Override
public void parseProperties(Properties zkProp)
throws IOException, ConfigException {
for (Entry<Object, Object> entry : zkProp.entrySet()) {
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/cloud/ZkController.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/cloud/ZkController.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/cloud/ZkController.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/cloud/ZkController.java Wed Feb 9 09:35:27 2011
@@ -186,7 +186,7 @@ public final class ZkController {
/**
* @param collection
* @param fileName
- * @return
+ * @return true if config file exists
* @throws KeeperException
* @throws InterruptedException
*/
@@ -206,7 +206,7 @@ public final class ZkController {
/**
* @param zkConfigName
* @param fileName
- * @return
+ * @return config file data (in bytes)
* @throws KeeperException
* @throws InterruptedException
*/
@@ -250,7 +250,7 @@ public final class ZkController {
}
/**
- * @return
+ * @return zookeeper server address
*/
public String getZkServerAddress() {
return zkServerAddress;
@@ -392,7 +392,7 @@ public final class ZkController {
/**
* @param path
- * @return
+ * @return true if the path exists
* @throws KeeperException
* @throws InterruptedException
*/
@@ -403,7 +403,7 @@ public final class ZkController {
/**
* @param collection
- * @return
+ * @return config value
* @throws KeeperException
* @throws InterruptedException
* @throws IOException
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/cloud/ZkSolrResourceLoader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/cloud/ZkSolrResourceLoader.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/cloud/ZkSolrResourceLoader.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/cloud/ZkSolrResourceLoader.java Wed Feb 9 09:35:27 2011
@@ -68,6 +68,7 @@ public class ZkSolrResourceLoader extend
*
* @return the stream for the named resource
*/
+ @Override
public InputStream openResource(String resource) {
InputStream is = null;
String file = collectionZkPath + "/" + resource;
@@ -93,12 +94,14 @@ public class ZkSolrResourceLoader extend
return is;
}
+ @Override
public String getConfigDir() {
throw new ZooKeeperException(
ErrorCode.SERVER_ERROR,
"ZkSolrResourceLoader does not support getConfigDir() - likely, what you are trying to do is not supported in ZooKeeper mode");
}
+ @Override
public String[] listConfigDir() {
List<String> list;
try {
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/AbstractSolrEventListener.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/AbstractSolrEventListener.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/AbstractSolrEventListener.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/AbstractSolrEventListener.java Wed Feb 9 09:35:27 2011
@@ -42,6 +42,7 @@ class AbstractSolrEventListener implemen
throw new UnsupportedOperationException();
}
+ @Override
public String toString() {
return getClass().getName() + args;
}
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/CoreContainer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/CoreContainer.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/CoreContainer.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/CoreContainer.java Wed Feb 9 09:35:27 2011
@@ -232,7 +232,7 @@ public class CoreContainer
cores.load(solrHome, fconf);
} else {
log.info("no solr.xml file found - using default");
- cores.load(solrHome, new ByteArrayInputStream(DEF_SOLR_XML.getBytes()));
+ cores.load(solrHome, new ByteArrayInputStream(DEF_SOLR_XML.getBytes("UTF-8")));
cores.configFile = fconf;
}
@@ -879,7 +879,7 @@ public class CoreContainer
/** Write the cores configuration through a writer.*/
void persist(Writer w) throws IOException {
- w.write("<?xml version='1.0' encoding='UTF-8'?>");
+ w.write("<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n");
w.write("<solr");
if (this.libDir != null) {
writeAttribute(w,"sharedLib",libDir);
@@ -888,9 +888,9 @@ public class CoreContainer
w.write(">\n");
if (containerProperties != null && !containerProperties.isEmpty()) {
- writeProperties(w, containerProperties);
+ writeProperties(w, containerProperties, " ");
}
- w.write("<cores");
+ w.write(" <cores");
writeAttribute(w, "adminPath",adminPath);
if(adminHandler != null) writeAttribute(w, "adminHandler",adminHandler);
if(shareSchema) writeAttribute(w, "shareSchema","true");
@@ -903,7 +903,7 @@ public class CoreContainer
}
}
- w.write("</cores>\n");
+ w.write(" </cores>\n");
w.write("</solr>\n");
}
@@ -918,8 +918,8 @@ public class CoreContainer
/** Writes the cores configuration node for a given core. */
void persist(Writer w, CoreDescriptor dcore) throws IOException {
- w.write(" <core");
- writeAttribute(w,"name",dcore.name);
+ w.write(" <core");
+ writeAttribute(w,"name",dcore.name.equals("") ? defaultCoreName : dcore.name);
writeAttribute(w,"instanceDir",dcore.getInstanceDir());
//write config (if not default)
String opt = dcore.getConfigName();
@@ -953,14 +953,14 @@ public class CoreContainer
w.write("/>\n"); // core
else {
w.write(">\n");
- writeProperties(w, dcore.getCoreProperties());
- w.write("</core>");
+ writeProperties(w, dcore.getCoreProperties(), " ");
+ w.write(" </core>\n");
}
}
- private void writeProperties(Writer w, Properties props) throws IOException {
+ private void writeProperties(Writer w, Properties props, String indent) throws IOException {
for (Map.Entry<Object, Object> entry : props.entrySet()) {
- w.write("<property");
+ w.write(indent + "<property");
writeAttribute(w,"name",entry.getKey());
writeAttribute(w,"value",entry.getValue());
w.write("/>\n");
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/RAMDirectoryFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/RAMDirectoryFactory.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/RAMDirectoryFactory.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/RAMDirectoryFactory.java Wed Feb 9 09:35:27 2011
@@ -45,6 +45,7 @@ public class RAMDirectoryFactory extends
}
}
+ @Override
public boolean exists(String path) {
synchronized (RAMDirectoryFactory.class) {
RefCntRamDirectory directory = directories.get(path);
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/RefCntRamDirectory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/RefCntRamDirectory.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/RefCntRamDirectory.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/RefCntRamDirectory.java Wed Feb 9 09:35:27 2011
@@ -1,3 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package org.apache.solr.core;
import java.io.IOException;
@@ -34,6 +51,7 @@ public class RefCntRamDirectory extends
}
}
+ @Override
public final synchronized void close() {
decRef();
}
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/RunExecutableListener.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/RunExecutableListener.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/RunExecutableListener.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/RunExecutableListener.java Wed Feb 9 09:35:27 2011
@@ -37,6 +37,7 @@ class RunExecutableListener extends Abst
protected String[] envp;
protected boolean wait=true;
+ @Override
public void init(NamedList args) {
super.init(args);
@@ -102,6 +103,7 @@ class RunExecutableListener extends Abst
}
+ @Override
public void postCommit() {
// anything generic need to be passed to the external program?
// the directory of the index? the command that caused it to be
@@ -109,6 +111,7 @@ class RunExecutableListener extends Abst
exec("postCommit");
}
+ @Override
public void newSearcher(SolrIndexSearcher newSearcher, SolrIndexSearcher currentSearcher) {
exec("newSearcher");
}
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/SolrConfig.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/SolrConfig.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/SolrConfig.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/SolrConfig.java Wed Feb 9 09:35:27 2011
@@ -141,7 +141,7 @@ public class SolrConfig extends Config {
filtOptThreshold = getFloat("query/boolTofilterOptimizer/@threshold",.05f);
useFilterForSortedQuery = getBool("query/useFilterForSortedQuery", false);
- queryResultWindowSize = getInt("query/queryResultWindowSize", 1);
+ queryResultWindowSize = Math.max(1, getInt("query/queryResultWindowSize", 1));
queryResultMaxDocsCached = getInt("query/queryResultMaxDocsCached", Integer.MAX_VALUE);
enableLazyFieldLoading = getBool("query/enableLazyFieldLoading", false);
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/SolrCore.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/SolrCore.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/SolrCore.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/SolrCore.java Wed Feb 9 09:35:27 2011
@@ -696,6 +696,7 @@ public final class SolrCore implements S
return refCount.get() <= 0;
}
+ @Override
protected void finalize() throws Throwable {
try {
if (getOpenCount() != 0) {
@@ -1005,7 +1006,7 @@ public final class SolrCore implements S
if (newestSearcher != null && solrConfig.reopenReaders
&& indexDirFile.equals(newIndexDirFile)) {
- IndexReader currentReader = newestSearcher.get().getReader();
+ IndexReader currentReader = newestSearcher.get().getIndexReader();
IndexReader newReader = currentReader.reopen();
if (newReader == currentReader) {
@@ -1192,6 +1193,7 @@ public final class SolrCore implements S
private RefCounted<SolrIndexSearcher> newHolder(SolrIndexSearcher newSearcher) {
RefCounted<SolrIndexSearcher> holder = new RefCounted<SolrIndexSearcher>(newSearcher) {
+ @Override
public void close() {
try {
synchronized(searcherLock) {
@@ -1286,7 +1288,7 @@ public final class SolrCore implements S
rsp.add("responseHeader", responseHeader);
// toLog is a local ref to the same NamedList used by the request
- NamedList toLog = rsp.getToLog();
+ NamedList<Object> toLog = rsp.getToLog();
// for back compat, we set these now just in case other code
// are expecting them during handleRequest
toLog.add("webapp", req.getContext().get("webapp"));
@@ -1312,7 +1314,7 @@ public final class SolrCore implements S
public static void setResponseHeaderValues(SolrRequestHandler handler, SolrQueryRequest req, SolrQueryResponse rsp) {
// TODO should check that responseHeader has not been replaced by handler
- NamedList responseHeader = rsp.getResponseHeader();
+ NamedList<Object> responseHeader = rsp.getResponseHeader();
final int qtime=(int)(rsp.getEndTime() - req.getStartTime());
int status = 0;
Exception exception = rsp.getException();
@@ -1586,7 +1588,7 @@ public final class SolrCore implements S
}
public NamedList getStatistics() {
- NamedList lst = new SimpleOrderedMap();
+ NamedList<Object> lst = new SimpleOrderedMap<Object>();
lst.add("coreName", name==null ? "(null)" : name);
lst.add("startTime", new Date(startTime));
lst.add("refCount", getOpenCount());
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/StandardDirectoryFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/StandardDirectoryFactory.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/StandardDirectoryFactory.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/StandardDirectoryFactory.java Wed Feb 9 09:35:27 2011
@@ -28,6 +28,7 @@ import org.apache.lucene.store.FSDirecto
*/
public class StandardDirectoryFactory extends DirectoryFactory {
+ @Override
public Directory open(String path) throws IOException {
return FSDirectory.open(new File(path));
}
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/StandardIndexReaderFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/StandardIndexReaderFactory.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/StandardIndexReaderFactory.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/core/StandardIndexReaderFactory.java Wed Feb 9 09:35:27 2011
@@ -32,6 +32,7 @@ public class StandardIndexReaderFactory
/* (non-Javadoc)
* @see org.apache.solr.core.IndexReaderFactory#newReader(org.apache.lucene.store.Directory, boolean)
*/
+ @Override
public IndexReader newReader(Directory indexDir, boolean readOnly)
throws IOException {
return IndexReader.open(indexDir, null, readOnly, termInfosIndexDivisor);
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java Wed Feb 9 09:35:27 2011
@@ -20,10 +20,14 @@ package org.apache.solr.handler;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.CharReader;
import org.apache.lucene.analysis.CharStream;
-import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.*;
import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.index.Payload;
+import org.apache.lucene.util.Attribute;
+import org.apache.lucene.util.AttributeSource;
+import org.apache.lucene.util.AttributeReflector;
+import org.apache.lucene.util.SorterTemplate;
import org.apache.solr.analysis.CharFilterFactory;
import org.apache.solr.analysis.TokenFilterFactory;
import org.apache.solr.analysis.TokenizerChain;
@@ -34,6 +38,9 @@ import org.apache.solr.common.SolrExcept
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.schema.FieldType;
+import org.apache.solr.util.ByteUtils;
+
+import org.apache.noggit.CharArr;
import java.io.IOException;
import java.io.StringReader;
@@ -47,6 +54,9 @@ import java.util.*;
*/
public abstract class AnalysisRequestHandlerBase extends RequestHandlerBase {
+ public static final Set<BytesRef> EMPTY_BYTES_SET = Collections.emptySet();
+
+ @Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
rsp.add("analysis", doAnalysis(req));
}
@@ -70,7 +80,7 @@ public abstract class AnalysisRequestHan
*
* @return NamedList containing the tokens produced by analyzing the given value
*/
- protected NamedList<List<NamedList>> analyzeValue(String value, AnalysisContext context) {
+ protected NamedList<? extends Object> analyzeValue(String value, AnalysisContext context) {
Analyzer analyzer = context.getAnalyzer();
@@ -93,7 +103,7 @@ public abstract class AnalysisRequestHan
TokenizerFactory tfac = tokenizerChain.getTokenizerFactory();
TokenFilterFactory[] filtfacs = tokenizerChain.getTokenFilterFactories();
- NamedList<List<NamedList>> namedList = new NamedList<List<NamedList>>();
+ NamedList<Object> namedList = new NamedList<Object>();
if( cfiltfacs != null ){
String source = value;
@@ -105,7 +115,7 @@ public abstract class AnalysisRequestHan
}
TokenStream tokenStream = tfac.create(tokenizerChain.charStream(new StringReader(value)));
- List<Token> tokens = analyzeTokenStream(tokenStream);
+ List<AttributeSource> tokens = analyzeTokenStream(tokenStream);
namedList.add(tokenStream.getClass().getName(), convertTokensToNamedLists(tokens, context));
@@ -113,7 +123,7 @@ public abstract class AnalysisRequestHan
for (TokenFilterFactory tokenFilterFactory : filtfacs) {
tokenStream = tokenFilterFactory.create(listBasedTokenStream);
- List<Token> tokenList = analyzeTokenStream(tokenStream);
+ List<AttributeSource> tokenList = analyzeTokenStream(tokenStream);
namedList.add(tokenStream.getClass().getName(), convertTokensToNamedLists(tokenList, context));
listBasedTokenStream = new ListBasedTokenStream(tokenList);
}
@@ -124,14 +134,24 @@ public abstract class AnalysisRequestHan
/**
* Analyzes the given text using the given analyzer and returns the produced tokens.
*
- * @param value The value to analyze.
+ * @param query The query to analyze.
* @param analyzer The analyzer to use.
- *
- * @return The produces token list.
*/
- protected List<Token> analyzeValue(String value, Analyzer analyzer) {
- TokenStream tokenStream = analyzer.tokenStream("", new StringReader(value));
- return analyzeTokenStream(tokenStream);
+ protected Set<BytesRef> getQueryTokenSet(String query, Analyzer analyzer) {
+ final Set<BytesRef> tokens = new HashSet<BytesRef>();
+ final TokenStream tokenStream = analyzer.tokenStream("", new StringReader(query));
+ final TermToBytesRefAttribute bytesAtt = tokenStream.getAttribute(TermToBytesRefAttribute.class);
+ try {
+ tokenStream.reset();
+ while (tokenStream.incrementToken()) {
+ final BytesRef bytes = new BytesRef();
+ bytesAtt.toBytesRef(bytes);
+ tokens.add(bytes);
+ }
+ } catch (IOException ioe) {
+ throw new RuntimeException("Error occured while iterating over tokenstream", ioe);
+ }
+ return tokens;
}
/**
@@ -141,41 +161,17 @@ public abstract class AnalysisRequestHan
*
* @return List of tokens produced from the TokenStream
*/
- private List<Token> analyzeTokenStream(TokenStream tokenStream) {
- List<Token> tokens = new ArrayList<Token>();
-
- // TODO change this API to support custom attributes
- CharTermAttribute termAtt = null;
- TermToBytesRefAttribute bytesAtt = null;
- if (tokenStream.hasAttribute(CharTermAttribute.class)) {
- termAtt = tokenStream.getAttribute(CharTermAttribute.class);
- } else if (tokenStream.hasAttribute(TermToBytesRefAttribute.class)) {
- bytesAtt = tokenStream.getAttribute(TermToBytesRefAttribute.class);
- }
- final OffsetAttribute offsetAtt = tokenStream.addAttribute(OffsetAttribute.class);
- final TypeAttribute typeAtt = tokenStream.addAttribute(TypeAttribute.class);
- final PositionIncrementAttribute posIncAtt = tokenStream.addAttribute(PositionIncrementAttribute.class);
- final FlagsAttribute flagsAtt = tokenStream.addAttribute(FlagsAttribute.class);
- final PayloadAttribute payloadAtt = tokenStream.addAttribute(PayloadAttribute.class);
-
+ private List<AttributeSource> analyzeTokenStream(TokenStream tokenStream) {
+ List<AttributeSource> tokens = new ArrayList<AttributeSource>();
+ // for backwards compatibility, add all "common" attributes
+ tokenStream.addAttribute(PositionIncrementAttribute.class);
+ tokenStream.addAttribute(OffsetAttribute.class);
+ tokenStream.addAttribute(TypeAttribute.class);
final BytesRef bytes = new BytesRef();
try {
+ tokenStream.reset();
while (tokenStream.incrementToken()) {
- Token token = new Token();
- if (termAtt != null) {
- token.setEmpty().append(termAtt);
- }
- if (bytesAtt != null) {
- bytesAtt.toBytesRef(bytes);
- // TODO: This is incorrect when numeric fields change in later lucene versions. It should use BytesRef directly!
- token.setEmpty().append(bytes.utf8ToString());
- }
- token.setOffset(offsetAtt.startOffset(), offsetAtt.endOffset());
- token.setType(typeAtt.type());
- token.setFlags(flagsAtt.getFlags());
- token.setPayload(payloadAtt.getPayload());
- token.setPositionIncrement(posIncAtt.getPositionIncrement());
- tokens.add((Token) token.clone());
+ tokens.add(tokenStream.cloneAttributes());
}
} catch (IOException ioe) {
throw new RuntimeException("Error occured while iterating over tokenstream", ioe);
@@ -184,6 +180,13 @@ public abstract class AnalysisRequestHan
return tokens;
}
+ // a static mapping of the reflected attribute keys to the names used in Solr 1.4
+ static Map<String,String> ATTRIBUTE_MAPPING = Collections.unmodifiableMap(new HashMap<String,String>() {{
+ put(OffsetAttribute.class.getName() + "#startOffset", "start");
+ put(OffsetAttribute.class.getName() + "#endOffset", "end");
+ put(TypeAttribute.class.getName() + "#type", "type");
+ }});
+
/**
* Converts the list of Tokens to a list of NamedLists representing the tokens.
*
@@ -192,41 +195,100 @@ public abstract class AnalysisRequestHan
*
* @return List of NamedLists containing the relevant information taken from the tokens
*/
- private List<NamedList> convertTokensToNamedLists(List<Token> tokens, AnalysisContext context) {
- List<NamedList> tokensNamedLists = new ArrayList<NamedList>();
+ private List<NamedList> convertTokensToNamedLists(final List<AttributeSource> tokens, AnalysisContext context) {
+ final List<NamedList> tokensNamedLists = new ArrayList<NamedList>();
- Collections.sort(tokens, new Comparator<Token>() {
- public int compare(Token o1, Token o2) {
- return o1.endOffset() - o2.endOffset();
+ final int[] positions = new int[tokens.size()];
+ int position = 0;
+ for (int i = 0, c = tokens.size(); i < c; i++) {
+ AttributeSource token = tokens.get(i);
+ position += token.addAttribute(PositionIncrementAttribute.class).getPositionIncrement();
+ positions[i] = position;
+ }
+
+ // sort the tokens by absolute position
+ new SorterTemplate() {
+ @Override
+ protected void swap(int i, int j) {
+ final int p = positions[i];
+ positions[i] = positions[j];
+ positions[j] = p;
+ Collections.swap(tokens, i, j);
+ }
+
+ @Override
+ protected int compare(int i, int j) {
+ return positions[i] - positions[j];
}
- });
- int position = 0;
+ @Override
+ protected void setPivot(int i) {
+ pivot = positions[i];
+ }
+
+ @Override
+ protected int comparePivot(int j) {
+ return pivot - positions[j];
+ }
+
+ private int pivot;
+ }.mergeSort(0, tokens.size() - 1);
FieldType fieldType = context.getFieldType();
- for (Token token : tokens) {
- NamedList<Object> tokenNamedList = new SimpleOrderedMap<Object>();
+ final BytesRef rawBytes = new BytesRef();
+ final CharArr textBuf = new CharArr();
+ for (int i = 0, c = tokens.size(); i < c; i++) {
+ AttributeSource token = tokens.get(i);
+ final NamedList<Object> tokenNamedList = new SimpleOrderedMap<Object>();
+ token.getAttribute(TermToBytesRefAttribute.class).toBytesRef(rawBytes);
+
+ textBuf.reset();
+ fieldType.indexedToReadable(rawBytes, textBuf);
+ final String text = textBuf.toString();
- String text = fieldType.indexedToReadable(token.toString());
tokenNamedList.add("text", text);
- if (!text.equals(token.toString())) {
- tokenNamedList.add("raw_text", token.toString());
+
+ if (token.hasAttribute(CharTermAttribute.class)) {
+ final String rawText = token.getAttribute(CharTermAttribute.class).toString();
+ if (!rawText.equals(text)) {
+ tokenNamedList.add("raw_text", rawText);
+ }
}
- tokenNamedList.add("type", token.type());
- tokenNamedList.add("start", token.startOffset());
- tokenNamedList.add("end", token.endOffset());
- position += token.getPositionIncrement();
- tokenNamedList.add("position", position);
+ tokenNamedList.add("raw_bytes", rawBytes.toString());
- if (context.getTermsToMatch().contains(token.toString())) {
+ if (context.getTermsToMatch().contains(rawBytes)) {
tokenNamedList.add("match", true);
}
- if (token.getPayload() != null) {
- tokenNamedList.add("payload", token.getPayload());
- }
+ tokenNamedList.add("position", positions[i]);
+
+ token.reflectWith(new AttributeReflector() {
+ public void reflect(Class<? extends Attribute> attClass, String key, Object value) {
+ // leave out position and bytes term
+ if (TermToBytesRefAttribute.class.isAssignableFrom(attClass))
+ return;
+ if (CharTermAttribute.class.isAssignableFrom(attClass))
+ return;
+ if (PositionIncrementAttribute.class.isAssignableFrom(attClass))
+ return;
+
+ String k = attClass.getName() + '#' + key;
+
+ // map keys for "standard attributes":
+ if (ATTRIBUTE_MAPPING.containsKey(k)) {
+ k = ATTRIBUTE_MAPPING.get(k);
+ }
+
+ if (value instanceof Payload) {
+ final Payload p = (Payload) value;
+ value = new BytesRef(p.getData()).toString();
+ }
+
+ tokenNamedList.add(k, value);
+ }
+ });
tokensNamedLists.add(tokenNamedList);
}
@@ -234,7 +296,7 @@ public abstract class AnalysisRequestHan
return tokensNamedLists;
}
- private String writeCharStream(NamedList out, CharStream input ){
+ private String writeCharStream(NamedList<Object> out, CharStream input ){
final int BUFFER_SIZE = 1024;
char[] buf = new char[BUFFER_SIZE];
int len = 0;
@@ -252,7 +314,6 @@ public abstract class AnalysisRequestHan
return sb.toString();
}
-
// ================================================= Inner classes =================================================
/**
@@ -260,38 +321,27 @@ public abstract class AnalysisRequestHan
*/
// TODO refactor to support custom attributes
protected final static class ListBasedTokenStream extends TokenStream {
- private final List<Token> tokens;
- private Iterator<Token> tokenIterator;
+ private final List<AttributeSource> tokens;
+ private Iterator<AttributeSource> tokenIterator;
- private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
- private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);
- private final TypeAttribute typeAtt = addAttribute(TypeAttribute.class);
- private final FlagsAttribute flagsAtt = addAttribute(FlagsAttribute.class);
- private final PayloadAttribute payloadAtt = addAttribute(PayloadAttribute.class);
- private final PositionIncrementAttribute posIncAtt = addAttribute(PositionIncrementAttribute.class);
/**
* Creates a new ListBasedTokenStream which uses the given tokens as its token source.
*
* @param tokens Source of tokens to be used
*/
- ListBasedTokenStream(List<Token> tokens) {
+ ListBasedTokenStream(List<AttributeSource> tokens) {
this.tokens = tokens;
tokenIterator = tokens.iterator();
}
- /**
- * {@inheritDoc}
- */
@Override
public boolean incrementToken() throws IOException {
if (tokenIterator.hasNext()) {
- Token next = tokenIterator.next();
- termAtt.copyBuffer(next.buffer(), 0, next.length());
- typeAtt.setType(next.type());
- offsetAtt.setOffset(next.startOffset(), next.endOffset());
- flagsAtt.setFlags(next.getFlags());
- payloadAtt.setPayload(next.getPayload());
- posIncAtt.setPositionIncrement(next.getPositionIncrement());
+ AttributeSource next = tokenIterator.next();
+ Iterator<Class<? extends Attribute>> atts = next.getAttributeClassesIterator();
+ while (atts.hasNext()) // make sure all att impls in the token exist here
+ addAttribute(atts.next());
+ next.copyTo(this);
return true;
} else {
return false;
@@ -313,7 +363,7 @@ public abstract class AnalysisRequestHan
private final String fieldName;
private final FieldType fieldType;
private final Analyzer analyzer;
- private final Set<String> termsToMatch;
+ private final Set<BytesRef> termsToMatch;
/**
* Constructs a new AnalysisContext with a given field type, analyzer and
@@ -327,7 +377,7 @@ public abstract class AnalysisRequestHan
* @param termsToMatch Holds all the terms that should match during the
* analysis process.
*/
- public AnalysisContext(FieldType fieldType, Analyzer analyzer, Set<String> termsToMatch) {
+ public AnalysisContext(FieldType fieldType, Analyzer analyzer, Set<BytesRef> termsToMatch) {
this(null, fieldType, analyzer, termsToMatch);
}
@@ -342,7 +392,7 @@ public abstract class AnalysisRequestHan
*
*/
public AnalysisContext(String fieldName, FieldType fieldType, Analyzer analyzer) {
- this(fieldName, fieldType, analyzer, Collections.EMPTY_SET);
+ this(fieldName, fieldType, analyzer, EMPTY_BYTES_SET);
}
/**
@@ -358,7 +408,7 @@ public abstract class AnalysisRequestHan
* @param termsToMatch Holds all the terms that should match during the
* analysis process.
*/
- public AnalysisContext(String fieldName, FieldType fieldType, Analyzer analyzer, Set<String> termsToMatch) {
+ public AnalysisContext(String fieldName, FieldType fieldType, Analyzer analyzer, Set<BytesRef> termsToMatch) {
this.fieldName = fieldName;
this.fieldType = fieldType;
this.analyzer = analyzer;
@@ -377,7 +427,7 @@ public abstract class AnalysisRequestHan
return analyzer;
}
- public Set<String> getTermsToMatch() {
+ public Set<BytesRef> getTermsToMatch() {
return termsToMatch;
}
}
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/BinaryUpdateRequestHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/BinaryUpdateRequestHandler.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/BinaryUpdateRequestHandler.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/BinaryUpdateRequestHandler.java Wed Feb 9 09:35:27 2011
@@ -46,8 +46,10 @@ import java.util.List;
public class BinaryUpdateRequestHandler extends ContentStreamHandlerBase {
+ @Override
protected ContentStreamLoader newLoader(SolrQueryRequest req, final UpdateRequestProcessor processor) {
return new ContentStreamLoader() {
+ @Override
public void load(SolrQueryRequest req, SolrQueryResponse rsp, ContentStream stream) throws Exception {
InputStream is = null;
try {
@@ -111,18 +113,22 @@ public class BinaryUpdateRequestHandler
}
}
+ @Override
public String getDescription() {
return "Add/Update multiple documents with javabin format";
}
+ @Override
public String getSourceId() {
return "$Id$";
}
+ @Override
public String getSource() {
return "$URL$";
}
+ @Override
public String getVersion() {
return "$Revision$";
}
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/CSVRequestHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/CSVRequestHandler.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/CSVRequestHandler.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/CSVRequestHandler.java Wed Feb 9 09:35:27 2011
@@ -42,6 +42,7 @@ import java.io.*;
public class CSVRequestHandler extends ContentStreamHandlerBase {
+ @Override
protected ContentStreamLoader newLoader(SolrQueryRequest req, UpdateRequestProcessor processor) {
return new SingleThreadedCSVLoader(req, processor);
}
@@ -118,6 +119,7 @@ abstract class CSVLoader extends Content
/** add zero length fields */
private class FieldAdderEmpty extends CSVLoader.FieldAdder {
+ @Override
void add(SolrInputDocument doc, int line, int column, String val) {
doc.addField(fields[column].getName(),val,1.0f);
}
@@ -127,6 +129,7 @@ abstract class CSVLoader extends Content
private class FieldTrimmer extends CSVLoader.FieldAdder {
private final CSVLoader.FieldAdder base;
FieldTrimmer(CSVLoader.FieldAdder base) { this.base=base; }
+ @Override
void add(SolrInputDocument doc, int line, int column, String val) {
base.add(doc, line, column, val.trim());
}
@@ -145,6 +148,7 @@ abstract class CSVLoader extends Content
this.to=to;
this.base=base;
}
+ @Override
void add(SolrInputDocument doc, int line, int column, String val) {
if (from.equals(val)) val=to;
base.add(doc,line,column,val);
@@ -162,6 +166,7 @@ abstract class CSVLoader extends Content
this.base = base;
}
+ @Override
void add(SolrInputDocument doc, int line, int column, String val) {
CSVParser parser = new CSVParser(new StringReader(val), strategy);
try {
@@ -305,13 +310,29 @@ abstract class CSVLoader extends Content
private void input_err(String msg, String[] line, int lineno) {
StringBuilder sb = new StringBuilder();
- sb.append(errHeader+", line="+lineno + ","+msg+"\n\tvalues={");
- for (String val: line) { sb.append("'"+val+"',"); }
+ sb.append(errHeader).append(", line=").append(lineno).append(",").append(msg).append("\n\tvalues={");
+ for (String val: line) {
+ sb.append("'").append(val).append("',"); }
sb.append('}');
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,sb.toString());
}
+ private void input_err(String msg, String[] lines, int lineNo, Throwable e) {
+ StringBuilder sb = new StringBuilder();
+ sb.append(errHeader).append(", line=").append(lineNo).append(",").append(msg).append("\n\tvalues={");
+ if (lines != null) {
+ for (String val : lines) {
+ sb.append("'").append(val).append("',");
+ }
+ } else {
+ sb.append("NO LINES AVAILABLE");
+ }
+ sb.append('}');
+ throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,sb.toString(), e);
+ }
+
/** load the CSV input */
+ @Override
public void load(SolrQueryRequest req, SolrQueryResponse rsp, ContentStream stream) throws IOException {
errHeader = "CSVLoader: input=" + stream.getSourceInfo();
Reader reader = null;
@@ -341,7 +362,13 @@ abstract class CSVLoader extends Content
// read the rest of the CSV file
for(;;) {
int line = parser.getLineNumber(); // for error reporting in MT mode
- String[] vals = parser.getLine();
+ String[] vals = null;
+ try {
+ vals = parser.getLine();
+ } catch (IOException e) {
+ //Catch the exception and rethrow it with more line information
+ input_err("can't read line: " + line, null, line, e);
+ }
if (vals==null) break;
if (vals.length != fields.length) {
@@ -382,6 +409,7 @@ class SingleThreadedCSVLoader extends CS
super(req, processor);
}
+ @Override
void addDoc(int line, String[] vals) throws IOException {
templateAdd.indexedId = null;
SolrInputDocument doc = new SolrInputDocument();
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/ContentStreamHandlerBase.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/ContentStreamHandlerBase.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/ContentStreamHandlerBase.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/ContentStreamHandlerBase.java Wed Feb 9 09:35:27 2011
@@ -32,6 +32,7 @@ import org.apache.solr.update.processor.
**/
public abstract class ContentStreamHandlerBase extends RequestHandlerBase {
+ @Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
SolrParams params = req.getParams();
UpdateRequestProcessorChain processorChain =
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/DocumentAnalysisRequestHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/DocumentAnalysisRequestHandler.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/DocumentAnalysisRequestHandler.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/DocumentAnalysisRequestHandler.java Wed Feb 9 09:35:27 2011
@@ -19,7 +19,7 @@ package org.apache.solr.handler;
import org.apache.commons.io.IOUtils;
import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.Token;
+import org.apache.lucene.util.BytesRef;
import org.apache.solr.client.solrj.request.DocumentAnalysisRequest;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;
@@ -27,6 +27,7 @@ import org.apache.solr.common.params.Ana
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.ContentStream;
+import org.apache.solr.common.util.ContentStreamBase;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.request.SolrQueryRequest;
@@ -41,7 +42,7 @@ import javax.xml.stream.XMLStreamConstan
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import java.io.IOException;
-import java.io.Reader;
+import java.io.InputStream;
import java.util.*;
/**
@@ -105,6 +106,7 @@ public class DocumentAnalysisRequestHand
/**
* {@inheritDoc}
*/
+ @Override
protected NamedList doAnalysis(SolrQueryRequest req) throws Exception {
DocumentAnalysisRequest analysisRequest = resolveAnalysisRequest(req);
return handleAnalysisRequest(analysisRequest, req.getSchema());
@@ -156,10 +158,14 @@ public class DocumentAnalysisRequestHand
request.setShowMatch(showMatch);
ContentStream stream = extractSingleContentStream(req);
- Reader reader = stream.getReader();
- XMLStreamReader parser = inputFactory.createXMLStreamReader(reader);
-
+ InputStream is = null;
+ XMLStreamReader parser = null;
+
try {
+ is = stream.getStream();
+ final String charset = ContentStreamBase.getCharsetFromContentType(stream.getContentType());
+ parser = (charset == null) ?
+ inputFactory.createXMLStreamReader(is) : inputFactory.createXMLStreamReader(is, charset);
while (true) {
int event = parser.next();
@@ -181,8 +187,8 @@ public class DocumentAnalysisRequestHand
}
} finally {
- parser.close();
- IOUtils.closeQuietly(reader);
+ if (parser != null) parser.close();
+ IOUtils.closeQuietly(is);
}
}
@@ -216,23 +222,21 @@ public class DocumentAnalysisRequestHand
FieldType fieldType = schema.getFieldType(name);
- Set<String> termsToMatch = new HashSet<String>();
- if (request.getQuery() != null && request.isShowMatch()) {
- try {
- List<Token> tokens = analyzeValue(request.getQuery(), fieldType.getQueryAnalyzer());
- for (Token token : tokens) {
- termsToMatch.add(token.toString());
- }
- } catch (Exception e) {
- // ignore analysis exceptions since we are applying arbitrary text to all fields
- }
+ final String queryValue = request.getQuery();
+ Set<BytesRef> termsToMatch;
+ try {
+ termsToMatch = (queryValue != null && request.isShowMatch())
+ ? getQueryTokenSet(queryValue, fieldType.getQueryAnalyzer())
+ : EMPTY_BYTES_SET;
+ } catch (Exception e) {
+ // ignore analysis exceptions since we are applying arbitrary text to all fields
+ termsToMatch = EMPTY_BYTES_SET;
}
if (request.getQuery() != null) {
try {
- AnalysisContext analysisContext = new AnalysisContext(fieldType, fieldType.getQueryAnalyzer(), Collections.EMPTY_SET);
- NamedList<List<NamedList>> tokens = analyzeValue(request.getQuery(), analysisContext);
- fieldTokens.add("query", tokens);
+ AnalysisContext analysisContext = new AnalysisContext(fieldType, fieldType.getQueryAnalyzer(), EMPTY_BYTES_SET);
+ fieldTokens.add("query", analyzeValue(request.getQuery(), analysisContext));
} catch (Exception e) {
// ignore analysis exceptions since we are applying arbitrary text to all fields
}
@@ -241,10 +245,11 @@ public class DocumentAnalysisRequestHand
Analyzer analyzer = fieldType.getAnalyzer();
AnalysisContext analysisContext = new AnalysisContext(fieldType, analyzer, termsToMatch);
Collection<Object> fieldValues = document.getFieldValues(name);
- NamedList<NamedList<List<NamedList>>> indexTokens = new SimpleOrderedMap<NamedList<List<NamedList>>>();
+ NamedList<NamedList<? extends Object>> indexTokens
+ = new SimpleOrderedMap<NamedList<? extends Object>>();
for (Object fieldValue : fieldValues) {
- NamedList<List<NamedList>> tokens = analyzeValue(fieldValue.toString(), analysisContext);
- indexTokens.add(String.valueOf(fieldValue), tokens);
+ indexTokens.add(String.valueOf(fieldValue),
+ analyzeValue(fieldValue.toString(), analysisContext));
}
fieldTokens.add("index", indexTokens);
}
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/DumpRequestHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/DumpRequestHandler.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/DumpRequestHandler.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/DumpRequestHandler.java Wed Feb 9 09:35:27 2011
@@ -18,7 +18,7 @@
package org.apache.solr.handler;
import java.io.IOException;
-import java.io.InputStream;
+import java.io.Reader;
import java.util.ArrayList;
import org.apache.commons.io.IOUtils;
@@ -38,7 +38,7 @@ public class DumpRequestHandler extends
// Write the streams...
if( req.getContentStreams() != null ) {
- ArrayList streams = new ArrayList();
+ ArrayList<NamedList<Object>> streams = new ArrayList<NamedList<Object>>();
// Cycle through each stream
for( ContentStream content : req.getContentStreams() ) {
NamedList<Object> stream = new SimpleOrderedMap<Object>();
@@ -46,11 +46,11 @@ public class DumpRequestHandler extends
stream.add( "sourceInfo", content.getSourceInfo() );
stream.add( "size", content.getSize() );
stream.add( "contentType", content.getContentType() );
- InputStream is = content.getStream();
+ Reader reader = content.getReader();
try {
- stream.add( "stream", IOUtils.toString(is) );
+ stream.add( "stream", IOUtils.toString(reader) );
} finally {
- is.close();
+ reader.close();
}
streams.add( stream );
}
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/FieldAnalysisRequestHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/FieldAnalysisRequestHandler.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/FieldAnalysisRequestHandler.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/FieldAnalysisRequestHandler.java Wed Feb 9 09:35:27 2011
@@ -17,7 +17,7 @@
package org.apache.solr.handler;
-import org.apache.lucene.analysis.Token;
+import org.apache.lucene.util.BytesRef;
import org.apache.solr.client.solrj.request.FieldAnalysisRequest;
import org.apache.solr.common.params.AnalysisParams;
import org.apache.solr.common.params.CommonParams;
@@ -30,10 +30,7 @@ import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.IndexSchema;
import org.apache.commons.io.IOUtils;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
+import java.util.*;
import java.io.Reader;
import java.io.IOException;
@@ -97,6 +94,7 @@ public class FieldAnalysisRequestHandler
/**
* {@inheritDoc}
*/
+ @Override
protected NamedList doAnalysis(SolrQueryRequest req) throws Exception {
FieldAnalysisRequest analysisRequest = resolveAnalysisRequest(req);
IndexSchema indexSchema = req.getCore().getSchema();
@@ -222,14 +220,10 @@ public class FieldAnalysisRequestHandler
*/
private NamedList<NamedList> analyzeValues(FieldAnalysisRequest analysisRequest, FieldType fieldType, String fieldName) {
- Set<String> termsToMatch = new HashSet<String>();
- String queryValue = analysisRequest.getQuery();
- if (queryValue != null && analysisRequest.isShowMatch()) {
- List<Token> tokens = analyzeValue(queryValue, fieldType.getQueryAnalyzer());
- for (Token token : tokens) {
- termsToMatch.add(token.toString());
- }
- }
+ final String queryValue = analysisRequest.getQuery();
+ final Set<BytesRef> termsToMatch = (queryValue != null && analysisRequest.isShowMatch())
+ ? getQueryTokenSet(queryValue, fieldType.getQueryAnalyzer())
+ : EMPTY_BYTES_SET;
NamedList<NamedList> analyzeResults = new SimpleOrderedMap<NamedList>();
if (analysisRequest.getFieldValue() != null) {
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/JsonLoader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/JsonLoader.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/JsonLoader.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/JsonLoader.java Wed Feb 9 09:35:27 2011
@@ -69,6 +69,7 @@ class JsonLoader extends ContentStreamLo
}
}
+ @SuppressWarnings("fallthrough")
void processUpdate(SolrQueryRequest req, UpdateRequestProcessor processor, JSONParser parser) throws IOException
{
int ev = parser.nextEvent();
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/JsonUpdateRequestHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/JsonUpdateRequestHandler.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/JsonUpdateRequestHandler.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/JsonUpdateRequestHandler.java Wed Feb 9 09:35:27 2011
@@ -35,6 +35,7 @@ public class JsonUpdateRequestHandler ex
super.init(args);
}
+ @Override
protected ContentStreamLoader newLoader(SolrQueryRequest req, UpdateRequestProcessor processor) {
return new JsonLoader(processor);
}
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/MoreLikeThisHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/MoreLikeThisHandler.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/MoreLikeThisHandler.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/MoreLikeThisHandler.java Wed Feb 9 09:35:27 2011
@@ -232,7 +232,7 @@ public class MoreLikeThisHandler extends
// Copied from StandardRequestHandler... perhaps it should be added to doStandardDebug?
if (dbg == true) {
try {
- NamedList<Object> dbgInfo = SolrPluginUtils.doStandardDebug(req, q, mlt.mltquery, mltDocs.docList, dbgQuery, dbgResults);
+ NamedList<Object> dbgInfo = SolrPluginUtils.doStandardDebug(req, q, mlt.getRawMLTQuery(), mltDocs.docList, dbgQuery, dbgResults);
if (null != dbgInfo) {
if (null != filters) {
dbgInfo.add("filter_queries",req.getParams().getParams(CommonParams.FQ));
@@ -279,12 +279,10 @@ public class MoreLikeThisHandler extends
final boolean needDocSet;
Map<String,Float> boostFields;
- Query mltquery; // expose this for debugging
-
public MoreLikeThisHelper( SolrParams params, SolrIndexSearcher searcher )
{
this.searcher = searcher;
- this.reader = searcher.getReader();
+ this.reader = searcher.getIndexReader();
this.uniqueKeyField = searcher.getSchema().getUniqueKeyField();
this.needDocSet = params.getBool(FacetParams.FACET,false);
@@ -310,9 +308,26 @@ public class MoreLikeThisHandler extends
boostFields = SolrPluginUtils.parseFieldBoosts(params.getParams(MoreLikeThisParams.QF));
}
- private void setBoosts(Query mltquery) {
+ private Query rawMLTQuery;
+ private Query boostedMLTQuery;
+ private BooleanQuery realMLTQuery;
+
+ public Query getRawMLTQuery(){
+ return rawMLTQuery;
+ }
+
+ public Query getBoostedMLTQuery(){
+ return boostedMLTQuery;
+ }
+
+ public Query getRealMLTQuery(){
+ return realMLTQuery;
+ }
+
+ private Query getBoostedQuery(Query mltquery) {
+ BooleanQuery boostedQuery = (BooleanQuery)mltquery.clone();
if (boostFields.size() > 0) {
- List clauses = ((BooleanQuery)mltquery).clauses();
+ List clauses = boostedQuery.clauses();
for( Object o : clauses ) {
TermQuery q = (TermQuery)((BooleanClause)o).getQuery();
Float b = this.boostFields.get(q.getTerm().field());
@@ -321,49 +336,51 @@ public class MoreLikeThisHandler extends
}
}
}
+ return boostedQuery;
}
public DocListAndSet getMoreLikeThis( int id, int start, int rows, List<Query> filters, List<InterestingTerm> terms, int flags ) throws IOException
{
Document doc = reader.document(id);
- mltquery = mlt.like(id);
- setBoosts(mltquery);
+ rawMLTQuery = mlt.like(id);
+ boostedMLTQuery = getBoostedQuery( rawMLTQuery );
if( terms != null ) {
- fillInterestingTermsFromMLTQuery( mltquery, terms );
+ fillInterestingTermsFromMLTQuery( rawMLTQuery, terms );
}
// exclude current document from results
- BooleanQuery mltQuery = new BooleanQuery();
- mltQuery.add(mltquery, BooleanClause.Occur.MUST);
- mltQuery.add(
+ realMLTQuery = new BooleanQuery();
+ realMLTQuery.add(boostedMLTQuery, BooleanClause.Occur.MUST);
+ realMLTQuery.add(
new TermQuery(new Term(uniqueKeyField.getName(), uniqueKeyField.getType().storedToIndexed(doc.getFieldable(uniqueKeyField.getName())))),
BooleanClause.Occur.MUST_NOT);
DocListAndSet results = new DocListAndSet();
if (this.needDocSet) {
- results = searcher.getDocListAndSet(mltQuery, filters, null, start, rows, flags);
+ results = searcher.getDocListAndSet(realMLTQuery, filters, null, start, rows, flags);
} else {
- results.docList = searcher.getDocList(mltQuery, filters, null, start, rows, flags);
+ results.docList = searcher.getDocList(realMLTQuery, filters, null, start, rows, flags);
}
return results;
}
public DocListAndSet getMoreLikeThis( Reader reader, int start, int rows, List<Query> filters, List<InterestingTerm> terms, int flags ) throws IOException
{
- mltquery = mlt.like(reader);
- setBoosts(mltquery);
+ rawMLTQuery = mlt.like(reader);
+ boostedMLTQuery = getBoostedQuery( rawMLTQuery );
if( terms != null ) {
- fillInterestingTermsFromMLTQuery( mltquery, terms );
+ fillInterestingTermsFromMLTQuery( boostedMLTQuery, terms );
}
DocListAndSet results = new DocListAndSet();
if (this.needDocSet) {
- results = searcher.getDocListAndSet(mltquery, filters, null, start, rows, flags);
+ results = searcher.getDocListAndSet( boostedMLTQuery, filters, null, start, rows, flags);
} else {
- results.docList = searcher.getDocList(mltquery, filters, null, start, rows, flags);
+ results.docList = searcher.getDocList( boostedMLTQuery, filters, null, start, rows, flags);
}
return results;
}
-
+
+ @Deprecated
public NamedList<DocList> getMoreLikeThese( DocList docs, int rows, int flags ) throws IOException
{
IndexSchema schema = searcher.getSchema();
@@ -382,7 +399,7 @@ public class MoreLikeThisHandler extends
private void fillInterestingTermsFromMLTQuery( Query query, List<InterestingTerm> terms )
{
- List clauses = ((BooleanQuery)mltquery).clauses();
+ List clauses = ((BooleanQuery)query).clauses();
for( Object o : clauses ) {
TermQuery q = (TermQuery)((BooleanClause)o).getQuery();
InterestingTerm it = new InterestingTerm();
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/ReplicationHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/ReplicationHandler.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/ReplicationHandler.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/ReplicationHandler.java Wed Feb 9 09:35:27 2011
@@ -94,10 +94,11 @@ public class ReplicationHandler extends
private volatile IndexCommit indexCommitPoint;
- volatile NamedList snapShootDetails;
+ volatile NamedList<Object> snapShootDetails;
private AtomicBoolean replicationEnabled = new AtomicBoolean(true);
+ @Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
rsp.setHttpCaching(false);
final SolrParams solrParams = req.getParams();
@@ -143,6 +144,7 @@ public class ReplicationHandler extends
}
final SolrParams paramsCopy = new ModifiableSolrParams(solrParams);
new Thread() {
+ @Override
public void run() {
doFetch(paramsCopy);
}
@@ -189,13 +191,13 @@ public class ReplicationHandler extends
}
}
- private List<NamedList> getCommits() {
+ private List<NamedList<Object>> getCommits() {
Map<Long, IndexCommit> commits = core.getDeletionPolicy().getCommits();
- List<NamedList> l = new ArrayList<NamedList>();
+ List<NamedList<Object>> l = new ArrayList<NamedList<Object>>();
for (IndexCommit c : commits.values()) {
try {
- NamedList nl = new NamedList();
+ NamedList<Object> nl = new NamedList<Object>();
nl.add("indexVersion", c.getVersion());
nl.add(GENERATION, c.getGeneration());
nl.add(CMD_GET_FILE_LIST, c.getFileNames());
@@ -281,7 +283,7 @@ public class ReplicationHandler extends
IndexCommit indexCommit = delPolicy.getLatestCommit();
if(indexCommit == null) {
- indexCommit = req.getSearcher().getReader().getIndexCommit();
+ indexCommit = req.getSearcher().getIndexReader().getIndexCommit();
}
// small race here before the commit point is saved
@@ -447,18 +449,22 @@ public class ReplicationHandler extends
return fileMeta;
}
+ @Override
public String getDescription() {
return "ReplicationHandler provides replication of index and configuration files from Master to Slaves";
}
+ @Override
public String getSourceId() {
return "$Id$";
}
+ @Override
public String getSource() {
return "$URL$";
}
+ @Override
public String getVersion() {
return "$Revision$";
}
@@ -481,8 +487,8 @@ public class ReplicationHandler extends
long version[] = new long[2];
RefCounted<SolrIndexSearcher> searcher = core.getSearcher();
try {
- version[0] = searcher.get().getReader().getIndexCommit().getVersion();
- version[1] = searcher.get().getReader().getIndexCommit().getGeneration();
+ version[0] = searcher.get().getIndexReader().getIndexCommit().getVersion();
+ version[1] = searcher.get().getIndexReader().getIndexCommit().getGeneration();
} catch (IOException e) {
LOG.warn("Unable to get index version : ", e);
} finally {
@@ -687,12 +693,12 @@ public class ReplicationHandler extends
LOG.error("Exception while writing replication details: ", e);
}
}
- if (isMaster)
- details.add("master", master);
- if (isSlave && showSlaveDetails)
- details.add("slave", slave);
-
}
+
+ if (isMaster)
+ details.add("master", master);
+ if (isSlave && showSlaveDetails)
+ details.add("slave", slave);
NamedList snapshotStats = snapShootDetails;
if (snapshotStats != null)
@@ -701,7 +707,7 @@ public class ReplicationHandler extends
return details;
}
- private void addVal(NamedList nl, String key, Properties props, Class clzz) {
+ private void addVal(NamedList<Object> nl, String key, Properties props, Class clzz) {
String s = props.getProperty(key);
if (s == null || s.trim().length() == 0) return;
if (clzz == Date.class) {
@@ -823,7 +829,7 @@ public class ReplicationHandler extends
replicateOnStart = true;
RefCounted<SolrIndexSearcher> s = core.getNewestSearcher(false);
try {
- IndexReader reader = s==null ? null : s.get().getReader();
+ IndexReader reader = s==null ? null : s.get().getIndexReader();
if (reader!=null && reader.getIndexCommit() != null && reader.getIndexCommit().getGeneration() != 1L) {
try {
if(replicateOnOptimize){
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/RequestHandlerBase.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/RequestHandlerBase.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/RequestHandlerBase.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/RequestHandlerBase.java Wed Feb 9 09:35:27 2011
@@ -168,8 +168,8 @@ public abstract class RequestHandlerBase
return null; // this can be overridden, but not required
}
- public NamedList getStatistics() {
- NamedList lst = new SimpleOrderedMap();
+ public NamedList<Object> getStatistics() {
+ NamedList<Object> lst = new SimpleOrderedMap<Object>();
lst.add("handlerStart",handlerStart);
lst.add("requests", numRequests);
lst.add("errors", numErrors);
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/RequestHandlerUtils.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/RequestHandlerUtils.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/RequestHandlerUtils.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/RequestHandlerUtils.java Wed Feb 9 09:35:27 2011
@@ -49,7 +49,7 @@ public class RequestHandlerUtils
* Check the request parameters and decide if it should commit or optimize.
* If it does, it will check parameters for "waitFlush" and "waitSearcher"
*
- * @deprecated Use {@link #handleCommit(UpdateRequestProcessor,SolrParams,boolean)}
+ * @deprecated Use {@link #handleCommit(SolrQueryRequest,UpdateRequestProcessor,SolrParams,boolean)}
*
* @since solr 1.2
*/
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/SnapPuller.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/SnapPuller.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/SnapPuller.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/SnapPuller.java Wed Feb 9 09:35:27 2011
@@ -63,6 +63,9 @@ import java.util.zip.InflaterInputStream
public class SnapPuller {
private static final Logger LOG = LoggerFactory.getLogger(SnapPuller.class.getName());
+ private static final List<Map<String,Object>> EMPTY_LIST_OF_MAPS
+ = Collections.emptyList();
+
private final String masterUrl;
private final ReplicationHandler replicationHandler;
@@ -192,14 +195,14 @@ public class SnapPuller {
return getNamedListResponse(post);
}
- private NamedList getNamedListResponse(PostMethod method) throws IOException {
+ private NamedList<?> getNamedListResponse(PostMethod method) throws IOException {
try {
int status = myHttpClient.executeMethod(method);
if (status != HttpStatus.SC_OK) {
throw new SolrException(SolrException.ErrorCode.SERVICE_UNAVAILABLE,
"Request failed for the url " + method);
}
- return (NamedList) new JavaBinCodec().unmarshal(method.getResponseBodyAsStream());
+ return (NamedList<?>) new JavaBinCodec().unmarshal(method.getResponseBodyAsStream());
} finally {
try {
method.releaseConnection();
@@ -216,8 +219,12 @@ public class SnapPuller {
post.addParameter(COMMAND, CMD_GET_FILE_LIST);
post.addParameter(CMD_INDEX_VERSION, String.valueOf(version));
post.addParameter("wt", "javabin");
- NamedList nl = getNamedListResponse(post);
- List<Map<String, Object>> f = (List<Map<String, Object>>) nl.get(CMD_GET_FILE_LIST);
+
+ @SuppressWarnings("unchecked")
+ NamedList<List<Map<String, Object>>> nl
+ = (NamedList<List<Map<String, Object>>>) getNamedListResponse(post);
+
+ List<Map<String, Object>> f = nl.get(CMD_GET_FILE_LIST);
if (f != null)
filesToDownload = Collections.synchronizedList(f);
else {
@@ -225,7 +232,7 @@ public class SnapPuller {
LOG.error("No files to download for indexversion: "+ version);
}
- f = (List<Map<String, Object>>) nl.get(CONF_FILES);
+ f = nl.get(CONF_FILES);
if (f != null)
confFilesToDownload = Collections.synchronizedList(f);
}
@@ -262,7 +269,7 @@ public class SnapPuller {
RefCounted<SolrIndexSearcher> searcherRefCounted = null;
try {
searcherRefCounted = core.getNewestSearcher(false);
- commit = searcherRefCounted.get().getReader().getIndexCommit();
+ commit = searcherRefCounted.get().getIndexReader().getIndexCommit();
} finally {
if (searcherRefCounted != null)
searcherRefCounted.decref();
@@ -293,15 +300,17 @@ public class SnapPuller {
isFullCopyNeeded = true;
successfulInstall = false;
boolean deleteTmpIdxDir = true;
+ File indexDir = null ;
try {
- File indexDir = new File(core.getIndexDir());
+ indexDir = new File(core.getIndexDir());
downloadIndexFiles(isFullCopyNeeded, tmpIndexDir, latestVersion);
LOG.info("Total time taken for download : " + ((System.currentTimeMillis() - replicationStartTime) / 1000) + " secs");
Collection<Map<String, Object>> modifiedConfFiles = getModifiedConfFiles(confFilesToDownload);
if (!modifiedConfFiles.isEmpty()) {
downloadConfFiles(confFilesToDownload, latestVersion);
if (isFullCopyNeeded) {
- modifyIndexProps(tmpIndexDir.getName());
+ successfulInstall = modifyIndexProps(tmpIndexDir.getName());
+ deleteTmpIdxDir = false;
} else {
successfulInstall = copyIndexFiles(tmpIndexDir, indexDir);
}
@@ -332,7 +341,8 @@ public class SnapPuller {
} catch (Exception e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Index fetch failed : ", e);
} finally {
- if(deleteTmpIdxDir) delTree(tmpIndexDir);
+ if (deleteTmpIdxDir) delTree(tmpIndexDir);
+ else delTree(indexDir);
}
return successfulInstall;
} finally {
@@ -498,6 +508,7 @@ public class SnapPuller {
private void reloadCore() {
new Thread() {
+ @Override
public void run() {
try {
solrCore.getCoreDescriptor().getCoreContainer().reload(solrCore.getName());
@@ -704,10 +715,11 @@ public class SnapPuller {
*/
private Collection<Map<String, Object>> getModifiedConfFiles(List<Map<String, Object>> confFilesToDownload) {
if (confFilesToDownload == null || confFilesToDownload.isEmpty())
- return Collections.EMPTY_LIST;
+ return EMPTY_LIST_OF_MAPS;
+
//build a map with alias/name as the key
Map<String, Map<String, Object>> nameVsFile = new HashMap<String, Map<String, Object>>();
- NamedList names = new NamedList();
+ NamedList<String> names = new NamedList<String>();
for (Map<String, Object> map : confFilesToDownload) {
//if alias is present that is the name the file may have in the slave
String name = (String) (map.get(ALIAS) == null ? map.get(NAME) : map.get(ALIAS));
@@ -725,7 +737,7 @@ public class SnapPuller {
nameVsFile.remove(name); //checksums are same so the file need not be downloaded
}
}
- return nameVsFile.isEmpty() ? Collections.EMPTY_LIST : nameVsFile.values();
+ return nameVsFile.isEmpty() ? EMPTY_LIST_OF_MAPS : nameVsFile.values();
}
/**
@@ -788,25 +800,25 @@ public class SnapPuller {
//make a copy first because it can be null later
List<Map<String, Object>> tmp = confFilesToDownload;
//create a new instance. or else iterator may fail
- return tmp == null ? Collections.EMPTY_LIST : new ArrayList<Map<String, Object>>(tmp);
+ return tmp == null ? EMPTY_LIST_OF_MAPS : new ArrayList<Map<String, Object>>(tmp);
}
List<Map<String, Object>> getConfFilesDownloaded() {
//make a copy first because it can be null later
List<Map<String, Object>> tmp = confFilesDownloaded;
// NOTE: it's safe to make a copy of a SynchronizedCollection(ArrayList)
- return tmp == null ? Collections.EMPTY_LIST : new ArrayList<Map<String, Object>>(tmp);
+ return tmp == null ? EMPTY_LIST_OF_MAPS : new ArrayList<Map<String, Object>>(tmp);
}
List<Map<String, Object>> getFilesToDownload() {
//make a copy first because it can be null later
List<Map<String, Object>> tmp = filesToDownload;
- return tmp == null ? Collections.EMPTY_LIST : new ArrayList<Map<String, Object>>(tmp);
+ return tmp == null ? EMPTY_LIST_OF_MAPS : new ArrayList<Map<String, Object>>(tmp);
}
List<Map<String, Object>> getFilesDownloaded() {
List<Map<String, Object>> tmp = filesDownloaded;
- return tmp == null ? Collections.EMPTY_LIST : new ArrayList<Map<String, Object>>(tmp);
+ return tmp == null ? EMPTY_LIST_OF_MAPS : new ArrayList<Map<String, Object>>(tmp);
}
Map<String, Object> getCurrentFile() {
Modified: lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/SnapShooter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/SnapShooter.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/SnapShooter.java (original)
+++ lucene/dev/branches/docvalues/solr/src/java/org/apache/solr/handler/SnapShooter.java Wed Feb 9 09:35:27 2011
@@ -64,6 +64,7 @@ public class SnapShooter {
replicationHandler.core.getDeletionPolicy().saveCommitPoint(indexCommit.getVersion());
new Thread() {
+ @Override
public void run() {
createSnapshot(indexCommit, replicationHandler);
}
@@ -71,7 +72,8 @@ public class SnapShooter {
}
void createSnapshot(final IndexCommit indexCommit, ReplicationHandler replicationHandler) {
- NamedList details = new NamedList();
+
+ NamedList<Object> details = new NamedList<Object>();
details.add("startTime", new Date().toString());
File snapShotDir = null;
String directoryName = null;