You are viewing a plain text version of this content. The canonical link for it is here.
Posted to java-commits@lucene.apache.org by rm...@apache.org on 2010/04/02 06:36:21 UTC
svn commit: r930161 [2/6] - in /lucene/dev/trunk/solr: ./ client/javascript/
client/ruby/flare/app/controllers/ client/ruby/flare/app/helpers/
client/ruby/flare/config/ client/ruby/flare/config/environments/
client/ruby/flare/db/ client/ruby/flare/publ...
Modified: lucene/dev/trunk/solr/contrib/dataimporthandler/src/extras/main/java/org/apache/solr/handler/dataimport/TikaEntityProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/dataimporthandler/src/extras/main/java/org/apache/solr/handler/dataimport/TikaEntityProcessor.java?rev=930161&r1=930160&r2=930161&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/dataimporthandler/src/extras/main/java/org/apache/solr/handler/dataimport/TikaEntityProcessor.java (original)
+++ lucene/dev/trunk/solr/contrib/dataimporthandler/src/extras/main/java/org/apache/solr/handler/dataimport/TikaEntityProcessor.java Fri Apr 2 04:36:13 2010
@@ -1,193 +1,193 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.handler.dataimport;
-
-import org.apache.commons.io.IOUtils;
-import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVERE;
-import static org.apache.solr.handler.dataimport.DataImportHandlerException.wrapAndThrow;
-import static org.apache.solr.handler.dataimport.DataImporter.COLUMN;
-import static org.apache.solr.handler.dataimport.XPathEntityProcessor.URL;
-import org.apache.tika.config.TikaConfig;
-import org.apache.tika.metadata.Metadata;
-import org.apache.tika.parser.AutoDetectParser;
-import org.apache.tika.parser.Parser;
-import org.apache.tika.parser.ParseContext;
-import org.apache.tika.sax.BodyContentHandler;
-import org.apache.tika.sax.ContentHandlerDecorator;
-import org.apache.tika.sax.XHTMLContentHandler;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.xml.sax.Attributes;
-import org.xml.sax.ContentHandler;
-import org.xml.sax.SAXException;
-import org.xml.sax.helpers.DefaultHandler;
-
-import javax.xml.transform.OutputKeys;
-import javax.xml.transform.TransformerConfigurationException;
-import javax.xml.transform.sax.SAXTransformerFactory;
-import javax.xml.transform.sax.TransformerHandler;
-import javax.xml.transform.stream.StreamResult;
-import java.io.File;
-import java.io.InputStream;
-import java.io.StringWriter;
-import java.io.Writer;
-import java.util.HashMap;
-import java.util.Map;
-/**
- * <p>An implementation of EntityProcessor which reads data from rich docs using Tika
- *
- * @version $Id$
- * @since solr 1.5
- */
-public class TikaEntityProcessor extends EntityProcessorBase {
- private TikaConfig tikaConfig;
- private static final Logger LOG = LoggerFactory.getLogger(TikaEntityProcessor.class);
- private String format = "text";
- private boolean done = false;
- private String parser;
- static final String AUTO_PARSER = "org.apache.tika.parser.AutoDetectParser";
-
-
- @Override
- protected void firstInit(Context context) {
- String tikaConfigFile = context.getResolvedEntityAttribute("tikaConfig");
- if (tikaConfigFile == null) {
- tikaConfig = TikaConfig.getDefaultConfig();
- } else {
- File configFile = new File(tikaConfigFile);
- if (!configFile.isAbsolute()) {
- configFile = new File(context.getSolrCore().getResourceLoader().getConfigDir(), tikaConfigFile);
- }
- try {
- tikaConfig = new TikaConfig(configFile);
- } catch (Exception e) {
- wrapAndThrow (SEVERE, e,"Unable to load Tika Config");
- }
- }
-
- format = context.getResolvedEntityAttribute("format");
- if(format == null)
- format = "text";
- if (!"html".equals(format) && !"xml".equals(format) && !"text".equals(format)&& !"none".equals(format) )
- throw new DataImportHandlerException(SEVERE, "'format' can be one of text|html|xml|none");
- parser = context.getResolvedEntityAttribute("parser");
- if(parser == null) {
- parser = AUTO_PARSER;
- }
- done = false;
- }
-
- public Map<String, Object> nextRow() {
- if(done) return null;
- Map<String, Object> row = new HashMap<String, Object>();
- DataSource<InputStream> dataSource = context.getDataSource();
- InputStream is = dataSource.getData(context.getResolvedEntityAttribute(URL));
- ContentHandler contentHandler = null;
- Metadata metadata = new Metadata();
- StringWriter sw = new StringWriter();
- try {
- if ("html".equals(format)) {
- contentHandler = getHtmlHandler(sw);
- } else if ("xml".equals(format)) {
- contentHandler = getXmlContentHandler(sw);
- } else if ("text".equals(format)) {
- contentHandler = getTextContentHandler(sw);
- } else if("none".equals(format)){
- contentHandler = new DefaultHandler();
- }
- } catch (TransformerConfigurationException e) {
- wrapAndThrow(SEVERE, e, "Unable to create content handler");
- }
- Parser tikaParser = null;
- if(parser.equals(AUTO_PARSER)){
- AutoDetectParser parser = new AutoDetectParser();
- parser.setConfig(tikaConfig);
- tikaParser = parser;
- } else {
- tikaParser = (Parser) context.getSolrCore().getResourceLoader().newInstance(parser);
- }
- try {
- tikaParser.parse(is, contentHandler, metadata , new ParseContext());
- } catch (Exception e) {
- wrapAndThrow(SEVERE, e, "Unable to read content");
- }
- IOUtils.closeQuietly(is);
- for (Map<String, String> field : context.getAllEntityFields()) {
- if (!"true".equals(field.get("meta"))) continue;
- String col = field.get(COLUMN);
- String s = metadata.get(col);
- if (s != null) row.put(col, s);
- }
- if(!"none".equals(format) ) row.put("text", sw.toString());
- done = true;
- return row;
- }
-
- private static ContentHandler getHtmlHandler(Writer writer)
- throws TransformerConfigurationException {
- SAXTransformerFactory factory = (SAXTransformerFactory)
- SAXTransformerFactory.newInstance();
- TransformerHandler handler = factory.newTransformerHandler();
- handler.getTransformer().setOutputProperty(OutputKeys.METHOD, "html");
- handler.setResult(new StreamResult(writer));
- return new ContentHandlerDecorator(handler) {
- @Override
- public void startElement(
- String uri, String localName, String name, Attributes atts)
- throws SAXException {
- if (XHTMLContentHandler.XHTML.equals(uri)) {
- uri = null;
- }
- if (!"head".equals(localName)) {
- super.startElement(uri, localName, name, atts);
- }
- }
-
- @Override
- public void endElement(String uri, String localName, String name)
- throws SAXException {
- if (XHTMLContentHandler.XHTML.equals(uri)) {
- uri = null;
- }
- if (!"head".equals(localName)) {
- super.endElement(uri, localName, name);
- }
- }
-
- @Override
- public void startPrefixMapping(String prefix, String uri) {/*no op*/ }
-
- @Override
- public void endPrefixMapping(String prefix) {/*no op*/ }
- };
- }
-
- private static ContentHandler getTextContentHandler(Writer writer) {
- return new BodyContentHandler(writer);
- }
-
- private static ContentHandler getXmlContentHandler(Writer writer)
- throws TransformerConfigurationException {
- SAXTransformerFactory factory = (SAXTransformerFactory)
- SAXTransformerFactory.newInstance();
- TransformerHandler handler = factory.newTransformerHandler();
- handler.getTransformer().setOutputProperty(OutputKeys.METHOD, "xml");
- handler.setResult(new StreamResult(writer));
- return handler;
- }
-
-}
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.dataimport;
+
+import org.apache.commons.io.IOUtils;
+import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVERE;
+import static org.apache.solr.handler.dataimport.DataImportHandlerException.wrapAndThrow;
+import static org.apache.solr.handler.dataimport.DataImporter.COLUMN;
+import static org.apache.solr.handler.dataimport.XPathEntityProcessor.URL;
+import org.apache.tika.config.TikaConfig;
+import org.apache.tika.metadata.Metadata;
+import org.apache.tika.parser.AutoDetectParser;
+import org.apache.tika.parser.Parser;
+import org.apache.tika.parser.ParseContext;
+import org.apache.tika.sax.BodyContentHandler;
+import org.apache.tika.sax.ContentHandlerDecorator;
+import org.apache.tika.sax.XHTMLContentHandler;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.xml.sax.Attributes;
+import org.xml.sax.ContentHandler;
+import org.xml.sax.SAXException;
+import org.xml.sax.helpers.DefaultHandler;
+
+import javax.xml.transform.OutputKeys;
+import javax.xml.transform.TransformerConfigurationException;
+import javax.xml.transform.sax.SAXTransformerFactory;
+import javax.xml.transform.sax.TransformerHandler;
+import javax.xml.transform.stream.StreamResult;
+import java.io.File;
+import java.io.InputStream;
+import java.io.StringWriter;
+import java.io.Writer;
+import java.util.HashMap;
+import java.util.Map;
+/**
+ * <p>An implementation of EntityProcessor which reads data from rich docs using Tika
+ *
+ * @version $Id$
+ * @since solr 1.5
+ */
+public class TikaEntityProcessor extends EntityProcessorBase {
+ private TikaConfig tikaConfig;
+ private static final Logger LOG = LoggerFactory.getLogger(TikaEntityProcessor.class);
+ private String format = "text";
+ private boolean done = false;
+ private String parser;
+ static final String AUTO_PARSER = "org.apache.tika.parser.AutoDetectParser";
+
+
+ @Override
+ protected void firstInit(Context context) {
+ String tikaConfigFile = context.getResolvedEntityAttribute("tikaConfig");
+ if (tikaConfigFile == null) {
+ tikaConfig = TikaConfig.getDefaultConfig();
+ } else {
+ File configFile = new File(tikaConfigFile);
+ if (!configFile.isAbsolute()) {
+ configFile = new File(context.getSolrCore().getResourceLoader().getConfigDir(), tikaConfigFile);
+ }
+ try {
+ tikaConfig = new TikaConfig(configFile);
+ } catch (Exception e) {
+ wrapAndThrow (SEVERE, e,"Unable to load Tika Config");
+ }
+ }
+
+ format = context.getResolvedEntityAttribute("format");
+ if(format == null)
+ format = "text";
+ if (!"html".equals(format) && !"xml".equals(format) && !"text".equals(format)&& !"none".equals(format) )
+ throw new DataImportHandlerException(SEVERE, "'format' can be one of text|html|xml|none");
+ parser = context.getResolvedEntityAttribute("parser");
+ if(parser == null) {
+ parser = AUTO_PARSER;
+ }
+ done = false;
+ }
+
+ public Map<String, Object> nextRow() {
+ if(done) return null;
+ Map<String, Object> row = new HashMap<String, Object>();
+ DataSource<InputStream> dataSource = context.getDataSource();
+ InputStream is = dataSource.getData(context.getResolvedEntityAttribute(URL));
+ ContentHandler contentHandler = null;
+ Metadata metadata = new Metadata();
+ StringWriter sw = new StringWriter();
+ try {
+ if ("html".equals(format)) {
+ contentHandler = getHtmlHandler(sw);
+ } else if ("xml".equals(format)) {
+ contentHandler = getXmlContentHandler(sw);
+ } else if ("text".equals(format)) {
+ contentHandler = getTextContentHandler(sw);
+ } else if("none".equals(format)){
+ contentHandler = new DefaultHandler();
+ }
+ } catch (TransformerConfigurationException e) {
+ wrapAndThrow(SEVERE, e, "Unable to create content handler");
+ }
+ Parser tikaParser = null;
+ if(parser.equals(AUTO_PARSER)){
+ AutoDetectParser parser = new AutoDetectParser();
+ parser.setConfig(tikaConfig);
+ tikaParser = parser;
+ } else {
+ tikaParser = (Parser) context.getSolrCore().getResourceLoader().newInstance(parser);
+ }
+ try {
+ tikaParser.parse(is, contentHandler, metadata , new ParseContext());
+ } catch (Exception e) {
+ wrapAndThrow(SEVERE, e, "Unable to read content");
+ }
+ IOUtils.closeQuietly(is);
+ for (Map<String, String> field : context.getAllEntityFields()) {
+ if (!"true".equals(field.get("meta"))) continue;
+ String col = field.get(COLUMN);
+ String s = metadata.get(col);
+ if (s != null) row.put(col, s);
+ }
+ if(!"none".equals(format) ) row.put("text", sw.toString());
+ done = true;
+ return row;
+ }
+
+ private static ContentHandler getHtmlHandler(Writer writer)
+ throws TransformerConfigurationException {
+ SAXTransformerFactory factory = (SAXTransformerFactory)
+ SAXTransformerFactory.newInstance();
+ TransformerHandler handler = factory.newTransformerHandler();
+ handler.getTransformer().setOutputProperty(OutputKeys.METHOD, "html");
+ handler.setResult(new StreamResult(writer));
+ return new ContentHandlerDecorator(handler) {
+ @Override
+ public void startElement(
+ String uri, String localName, String name, Attributes atts)
+ throws SAXException {
+ if (XHTMLContentHandler.XHTML.equals(uri)) {
+ uri = null;
+ }
+ if (!"head".equals(localName)) {
+ super.startElement(uri, localName, name, atts);
+ }
+ }
+
+ @Override
+ public void endElement(String uri, String localName, String name)
+ throws SAXException {
+ if (XHTMLContentHandler.XHTML.equals(uri)) {
+ uri = null;
+ }
+ if (!"head".equals(localName)) {
+ super.endElement(uri, localName, name);
+ }
+ }
+
+ @Override
+ public void startPrefixMapping(String prefix, String uri) {/*no op*/ }
+
+ @Override
+ public void endPrefixMapping(String prefix) {/*no op*/ }
+ };
+ }
+
+ private static ContentHandler getTextContentHandler(Writer writer) {
+ return new BodyContentHandler(writer);
+ }
+
+ private static ContentHandler getXmlContentHandler(Writer writer)
+ throws TransformerConfigurationException {
+ SAXTransformerFactory factory = (SAXTransformerFactory)
+ SAXTransformerFactory.newInstance();
+ TransformerHandler handler = factory.newTransformerHandler();
+ handler.getTransformer().setOutputProperty(OutputKeys.METHOD, "xml");
+ handler.setResult(new StreamResult(writer));
+ return handler;
+ }
+
+}
Propchange: lucene/dev/trunk/solr/contrib/dataimporthandler/src/extras/main/java/org/apache/solr/handler/dataimport/TikaEntityProcessor.java
------------------------------------------------------------------------------
svn:eol-style = native
Modified: lucene/dev/trunk/solr/contrib/dataimporthandler/src/extras/test/java/org/apache/solr/handler/dataimport/TestTikaEntityProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/dataimporthandler/src/extras/test/java/org/apache/solr/handler/dataimport/TestTikaEntityProcessor.java?rev=930161&r1=930160&r2=930161&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/dataimporthandler/src/extras/test/java/org/apache/solr/handler/dataimport/TestTikaEntityProcessor.java (original)
+++ lucene/dev/trunk/solr/contrib/dataimporthandler/src/extras/test/java/org/apache/solr/handler/dataimport/TestTikaEntityProcessor.java Fri Apr 2 04:36:13 2010
@@ -1,61 +1,61 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.handler.dataimport;
-
-import org.junit.After;
-import org.junit.Before;
-
-/**Testcase for TikaEntityProcessor
- * @version $Id$
- * @since solr 1.5
- */
-public class TestTikaEntityProcessor extends AbstractDataImportHandlerTest {
-
- @Before
- public void setUp() throws Exception {
- super.setUp();
- }
-
- @After
- public void tearDown() throws Exception {
- super.tearDown();
- }
-
- public String getSchemaFile() {
- return "dataimport-schema-no-unique-key.xml";
- }
-
- public String getSolrConfigFile() {
- return "dataimport-solrconfig.xml";
- }
-
- public void testIndexingWithTikaEntityProcessor() throws Exception {
- String conf =
- "<dataConfig>" +
- " <dataSource type=\"BinFileDataSource\"/>" +
- " <document>" +
- " <entity processor=\"TikaEntityProcessor\" url=\"../../../../../extraction/src/test/resources/solr-word.pdf\" >" +
- " <field column=\"Author\" meta=\"true\" name=\"author\"/>" +
- " <field column=\"title\" meta=\"true\" name=\"docTitle\"/>" +
- " <field column=\"text\"/>" +
- " </entity>" +
- " </document>" +
- "</dataConfig>";
- super.runFullImport(conf);
- assertQ(req("*:*"), "//*[@numFound='1']");
- }
-}
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.dataimport;
+
+import org.junit.After;
+import org.junit.Before;
+
+/**Testcase for TikaEntityProcessor
+ * @version $Id$
+ * @since solr 1.5
+ */
+public class TestTikaEntityProcessor extends AbstractDataImportHandlerTest {
+
+ @Before
+ public void setUp() throws Exception {
+ super.setUp();
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ super.tearDown();
+ }
+
+ public String getSchemaFile() {
+ return "dataimport-schema-no-unique-key.xml";
+ }
+
+ public String getSolrConfigFile() {
+ return "dataimport-solrconfig.xml";
+ }
+
+ public void testIndexingWithTikaEntityProcessor() throws Exception {
+ String conf =
+ "<dataConfig>" +
+ " <dataSource type=\"BinFileDataSource\"/>" +
+ " <document>" +
+ " <entity processor=\"TikaEntityProcessor\" url=\"../../../../../extraction/src/test/resources/solr-word.pdf\" >" +
+ " <field column=\"Author\" meta=\"true\" name=\"author\"/>" +
+ " <field column=\"title\" meta=\"true\" name=\"docTitle\"/>" +
+ " <field column=\"text\"/>" +
+ " </entity>" +
+ " </document>" +
+ "</dataConfig>";
+ super.runFullImport(conf);
+ assertQ(req("*:*"), "//*[@numFound='1']");
+ }
+}
Propchange: lucene/dev/trunk/solr/contrib/dataimporthandler/src/extras/test/java/org/apache/solr/handler/dataimport/TestTikaEntityProcessor.java
------------------------------------------------------------------------------
svn:eol-style = native
Modified: lucene/dev/trunk/solr/contrib/dataimporthandler/src/extras/test/resources/solr/conf/dataimport-schema-no-unique-key.xml
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/dataimporthandler/src/extras/test/resources/solr/conf/dataimport-schema-no-unique-key.xml?rev=930161&r1=930160&r2=930161&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/dataimporthandler/src/extras/test/resources/solr/conf/dataimport-schema-no-unique-key.xml (original)
+++ lucene/dev/trunk/solr/contrib/dataimporthandler/src/extras/test/resources/solr/conf/dataimport-schema-no-unique-key.xml Fri Apr 2 04:36:13 2010
@@ -1,205 +1,205 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-
-<!--
- This is the Solr schema file. This file should be named "schema.xml" and
- should be in the conf directory under the solr home
- (i.e. ./solr/conf/schema.xml by default)
- or located where the classloader for the Solr webapp can find it.
-
- This example schema is the recommended starting point for users.
- It should be kept correct and concise, usable out-of-the-box.
-
- For more information, on how to customize this file, please see
- http://wiki.apache.org/solr/SchemaXml
--->
-
-<schema name="test" version="1.2">
- <!-- attribute "name" is the name of this schema and is only used for display purposes.
- Applications should change this to reflect the nature of the search collection.
- version="1.1" is Solr's version number for the schema syntax and semantics. It should
- not normally be changed by applications.
- 1.0: multiValued attribute did not exist, all fields are multiValued by nature
- 1.1: multiValued attribute introduced, false by default -->
-
- <types>
- <!-- field type definitions. The "name" attribute is
- just a label to be used by field definitions. The "class"
- attribute and any other attributes determine the real
- behavior of the fieldType.
- Class names starting with "solr" refer to java classes in the
- org.apache.solr.analysis package.
- -->
-
- <!-- The StrField type is not analyzed, but indexed/stored verbatim.
- - StrField and TextField support an optional compressThreshold which
- limits compression (if enabled in the derived fields) to values which
- exceed a certain size (in characters).
- -->
- <fieldType name="string" class="solr.StrField" sortMissingLast="true" omitNorms="true"/>
-
- <!-- boolean type: "true" or "false" -->
- <fieldType name="boolean" class="solr.BoolField" sortMissingLast="true" omitNorms="true"/>
-
- <!-- The optional sortMissingLast and sortMissingFirst attributes are
- currently supported on types that are sorted internally as strings.
- - If sortMissingLast="true", then a sort on this field will cause documents
- without the field to come after documents with the field,
- regardless of the requested sort order (asc or desc).
- - If sortMissingFirst="true", then a sort on this field will cause documents
- without the field to come before documents with the field,
- regardless of the requested sort order.
- - If sortMissingLast="false" and sortMissingFirst="false" (the default),
- then default lucene sorting will be used which places docs without the
- field first in an ascending sort and last in a descending sort.
- -->
-
-
- <!-- numeric field types that store and index the text
- value verbatim (and hence don't support range queries, since the
- lexicographic ordering isn't equal to the numeric ordering) -->
- <fieldType name="integer" class="solr.IntField" omitNorms="true"/>
- <fieldType name="long" class="solr.LongField" omitNorms="true"/>
- <fieldType name="float" class="solr.FloatField" omitNorms="true"/>
- <fieldType name="double" class="solr.DoubleField" omitNorms="true"/>
-
-
- <!-- Numeric field types that manipulate the value into
- a string value that isn't human-readable in its internal form,
- but with a lexicographic ordering the same as the numeric ordering,
- so that range queries work correctly. -->
- <fieldType name="sint" class="solr.SortableIntField" sortMissingLast="true" omitNorms="true"/>
- <fieldType name="slong" class="solr.SortableLongField" sortMissingLast="true" omitNorms="true"/>
- <fieldType name="sfloat" class="solr.SortableFloatField" sortMissingLast="true" omitNorms="true"/>
- <fieldType name="sdouble" class="solr.SortableDoubleField" sortMissingLast="true" omitNorms="true"/>
-
-
- <!-- The format for this date field is of the form 1995-12-31T23:59:59Z, and
- is a more restricted form of the canonical representation of dateTime
- http://www.w3.org/TR/xmlschema-2/#dateTime
- The trailing "Z" designates UTC time and is mandatory.
- Optional fractional seconds are allowed: 1995-12-31T23:59:59.999Z
- All other components are mandatory.
-
- Expressions can also be used to denote calculations that should be
- performed relative to "NOW" to determine the value, ie...
-
- NOW/HOUR
- ... Round to the start of the current hour
- NOW-1DAY
- ... Exactly 1 day prior to now
- NOW/DAY+6MONTHS+3DAYS
- ... 6 months and 3 days in the future from the start of
- the current day
-
- Consult the DateField javadocs for more information.
- -->
- <fieldType name="date" class="solr.DateField" sortMissingLast="true" omitNorms="true"/>
-
-
- <!-- The "RandomSortField" is not used to store or search any
- data. You can declare fields of this type it in your schema
- to generate psuedo-random orderings of your docs for sorting
- purposes. The ordering is generated based on the field name
- and the version of the index, As long as the index version
- remains unchanged, and the same field name is reused,
- the ordering of the docs will be consistent.
- If you want differend psuedo-random orderings of documents,
- for the same version of the index, use a dynamicField and
- change the name
- -->
- <fieldType name="random" class="solr.RandomSortField" indexed="true" />
-
- <!-- solr.TextField allows the specification of custom text analyzers
- specified as a tokenizer and a list of token filters. Different
- analyzers may be specified for indexing and querying.
-
- The optional positionIncrementGap puts space between multiple fields of
- this type on the same document, with the purpose of preventing false phrase
- matching across fields.
-
- For more info on customizing your analyzer chain, please see
- http://wiki.apache.org/solr/AnalyzersTokenizersTokenFilters
- -->
-
- <!-- One can also specify an existing Analyzer class that has a
- default constructor via the class attribute on the analyzer element
- <fieldType name="text_greek" class="solr.TextField">
- <analyzer class="org.apache.lucene.analysis.el.GreekAnalyzer"/>
- </fieldType>
- -->
-
- <!-- A text field that only splits on whitespace for exact matching of words -->
- <fieldType name="text_ws" class="solr.TextField" positionIncrementGap="100">
- <analyzer>
- <tokenizer class="solr.WhitespaceTokenizerFactory"/>
- </analyzer>
- </fieldType>
-
- <!-- A text field that uses WordDelimiterFilter to enable splitting and matching of
- words on case-change, alpha numeric boundaries, and non-alphanumeric chars,
- so that a query of "wifi" or "wi fi" could match a document containing "Wi-Fi".
- Synonyms and stopwords are customized by external files, and stemming is enabled.
- Duplicate tokens at the same position (which may result from Stemmed Synonyms or
- WordDelim parts) are removed.
- -->
- <fieldType name="text" class="solr.TextField" positionIncrementGap="100">
- <analyzer type="index">
- <tokenizer class="solr.WhitespaceTokenizerFactory"/>
- <!-- in this example, we will only use synonyms at query time
- <filter class="solr.SynonymFilterFactory" synonyms="index_synonyms.txt" ignoreCase="true" expand="false"/>
- -->
- <!--<filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt"/>-->
- <filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="1" catenateNumbers="1" catenateAll="0" splitOnCaseChange="1"/>
- <filter class="solr.LowerCaseFilterFactory"/>
- <!--<filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
- <filter class="solr.PorterStemFilterFactory"/>-->
- <filter class="solr.RemoveDuplicatesTokenFilterFactory"/>
- </analyzer>
- <analyzer type="query">
- <tokenizer class="solr.WhitespaceTokenizerFactory"/>
- <!--<filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>-->
- <!--<filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt"/>-->
- <filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="0" catenateNumbers="0" catenateAll="0" splitOnCaseChange="1"/>
- <filter class="solr.LowerCaseFilterFactory"/>
- <!--<filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
- <filter class="solr.PorterStemFilterFactory"/>-->
- <filter class="solr.RemoveDuplicatesTokenFilterFactory"/>
- </analyzer>
- </fieldType>
- <!-- since fields of this type are by default not stored or indexed, any data added to
- them will be ignored outright
- -->
- <fieldtype name="ignored" stored="false" indexed="false" class="solr.StrField" />
-
- </types>
-
-
- <fields>
- <field name="title" type="string" indexed="true" stored="true"/>
- <field name="author" type="string" indexed="true" stored="true" />
- <field name="text" type="text" indexed="true" stored="true" />
-
- </fields>
- <!-- field for the QueryParser to use when an explicit fieldname is absent -->
- <defaultSearchField>text</defaultSearchField>
-
- <!-- SolrQueryParser configuration: defaultOperator="AND|OR" -->
- <solrQueryParser defaultOperator="OR"/>
-
-</schema>
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!--
+ This is the Solr schema file. This file should be named "schema.xml" and
+ should be in the conf directory under the solr home
+ (i.e. ./solr/conf/schema.xml by default)
+ or located where the classloader for the Solr webapp can find it.
+
+ This example schema is the recommended starting point for users.
+ It should be kept correct and concise, usable out-of-the-box.
+
+ For more information, on how to customize this file, please see
+ http://wiki.apache.org/solr/SchemaXml
+-->
+
+<schema name="test" version="1.2">
+ <!-- attribute "name" is the name of this schema and is only used for display purposes.
+ Applications should change this to reflect the nature of the search collection.
+ version="1.1" is Solr's version number for the schema syntax and semantics. It should
+ not normally be changed by applications.
+ 1.0: multiValued attribute did not exist, all fields are multiValued by nature
+ 1.1: multiValued attribute introduced, false by default -->
+
+ <types>
+ <!-- field type definitions. The "name" attribute is
+ just a label to be used by field definitions. The "class"
+ attribute and any other attributes determine the real
+ behavior of the fieldType.
+ Class names starting with "solr" refer to java classes in the
+ org.apache.solr.analysis package.
+ -->
+
+ <!-- The StrField type is not analyzed, but indexed/stored verbatim.
+ - StrField and TextField support an optional compressThreshold which
+ limits compression (if enabled in the derived fields) to values which
+ exceed a certain size (in characters).
+ -->
+ <fieldType name="string" class="solr.StrField" sortMissingLast="true" omitNorms="true"/>
+
+ <!-- boolean type: "true" or "false" -->
+ <fieldType name="boolean" class="solr.BoolField" sortMissingLast="true" omitNorms="true"/>
+
+ <!-- The optional sortMissingLast and sortMissingFirst attributes are
+ currently supported on types that are sorted internally as strings.
+ - If sortMissingLast="true", then a sort on this field will cause documents
+ without the field to come after documents with the field,
+ regardless of the requested sort order (asc or desc).
+ - If sortMissingFirst="true", then a sort on this field will cause documents
+ without the field to come before documents with the field,
+ regardless of the requested sort order.
+ - If sortMissingLast="false" and sortMissingFirst="false" (the default),
+ then default lucene sorting will be used which places docs without the
+ field first in an ascending sort and last in a descending sort.
+ -->
+
+
+ <!-- numeric field types that store and index the text
+ value verbatim (and hence don't support range queries, since the
+ lexicographic ordering isn't equal to the numeric ordering) -->
+ <fieldType name="integer" class="solr.IntField" omitNorms="true"/>
+ <fieldType name="long" class="solr.LongField" omitNorms="true"/>
+ <fieldType name="float" class="solr.FloatField" omitNorms="true"/>
+ <fieldType name="double" class="solr.DoubleField" omitNorms="true"/>
+
+
+ <!-- Numeric field types that manipulate the value into
+ a string value that isn't human-readable in its internal form,
+ but with a lexicographic ordering the same as the numeric ordering,
+ so that range queries work correctly. -->
+ <fieldType name="sint" class="solr.SortableIntField" sortMissingLast="true" omitNorms="true"/>
+ <fieldType name="slong" class="solr.SortableLongField" sortMissingLast="true" omitNorms="true"/>
+ <fieldType name="sfloat" class="solr.SortableFloatField" sortMissingLast="true" omitNorms="true"/>
+ <fieldType name="sdouble" class="solr.SortableDoubleField" sortMissingLast="true" omitNorms="true"/>
+
+
+ <!-- The format for this date field is of the form 1995-12-31T23:59:59Z, and
+ is a more restricted form of the canonical representation of dateTime
+ http://www.w3.org/TR/xmlschema-2/#dateTime
+ The trailing "Z" designates UTC time and is mandatory.
+ Optional fractional seconds are allowed: 1995-12-31T23:59:59.999Z
+ All other components are mandatory.
+
+ Expressions can also be used to denote calculations that should be
+ performed relative to "NOW" to determine the value, ie...
+
+ NOW/HOUR
+ ... Round to the start of the current hour
+ NOW-1DAY
+ ... Exactly 1 day prior to now
+ NOW/DAY+6MONTHS+3DAYS
+ ... 6 months and 3 days in the future from the start of
+ the current day
+
+ Consult the DateField javadocs for more information.
+ -->
+ <fieldType name="date" class="solr.DateField" sortMissingLast="true" omitNorms="true"/>
+
+
+ <!-- The "RandomSortField" is not used to store or search any
+ data. You can declare fields of this type it in your schema
+ to generate pseudo-random orderings of your docs for sorting
+ purposes. The ordering is generated based on the field name
+ and the version of the index, As long as the index version
+ remains unchanged, and the same field name is reused,
+ the ordering of the docs will be consistent.
+ If you want different pseudo-random orderings of documents,
+ for the same version of the index, use a dynamicField and
+ change the name
+ -->
+ <fieldType name="random" class="solr.RandomSortField" indexed="true" />
+
+ <!-- solr.TextField allows the specification of custom text analyzers
+ specified as a tokenizer and a list of token filters. Different
+ analyzers may be specified for indexing and querying.
+
+ The optional positionIncrementGap puts space between multiple fields of
+ this type on the same document, with the purpose of preventing false phrase
+ matching across fields.
+
+ For more info on customizing your analyzer chain, please see
+ http://wiki.apache.org/solr/AnalyzersTokenizersTokenFilters
+ -->
+
+ <!-- One can also specify an existing Analyzer class that has a
+ default constructor via the class attribute on the analyzer element
+ <fieldType name="text_greek" class="solr.TextField">
+ <analyzer class="org.apache.lucene.analysis.el.GreekAnalyzer"/>
+ </fieldType>
+ -->
+
+ <!-- A text field that only splits on whitespace for exact matching of words -->
+ <fieldType name="text_ws" class="solr.TextField" positionIncrementGap="100">
+ <analyzer>
+ <tokenizer class="solr.WhitespaceTokenizerFactory"/>
+ </analyzer>
+ </fieldType>
+
+ <!-- A text field that uses WordDelimiterFilter to enable splitting and matching of
+ words on case-change, alpha numeric boundaries, and non-alphanumeric chars,
+ so that a query of "wifi" or "wi fi" could match a document containing "Wi-Fi".
+ Synonyms and stopwords are customized by external files, and stemming is enabled.
+ Duplicate tokens at the same position (which may result from Stemmed Synonyms or
+ WordDelim parts) are removed.
+ -->
+ <fieldType name="text" class="solr.TextField" positionIncrementGap="100">
+ <analyzer type="index">
+ <tokenizer class="solr.WhitespaceTokenizerFactory"/>
+ <!-- in this example, we will only use synonyms at query time
+ <filter class="solr.SynonymFilterFactory" synonyms="index_synonyms.txt" ignoreCase="true" expand="false"/>
+ -->
+ <!--<filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt"/>-->
+ <filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="1" catenateNumbers="1" catenateAll="0" splitOnCaseChange="1"/>
+ <filter class="solr.LowerCaseFilterFactory"/>
+ <!--<filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
+ <filter class="solr.PorterStemFilterFactory"/>-->
+ <filter class="solr.RemoveDuplicatesTokenFilterFactory"/>
+ </analyzer>
+ <analyzer type="query">
+ <tokenizer class="solr.WhitespaceTokenizerFactory"/>
+ <!--<filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>-->
+ <!--<filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt"/>-->
+ <filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="0" catenateNumbers="0" catenateAll="0" splitOnCaseChange="1"/>
+ <filter class="solr.LowerCaseFilterFactory"/>
+ <!--<filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
+ <filter class="solr.PorterStemFilterFactory"/>-->
+ <filter class="solr.RemoveDuplicatesTokenFilterFactory"/>
+ </analyzer>
+ </fieldType>
+ <!-- since fields of this type are by default not stored or indexed, any data added to
+ them will be ignored outright
+ -->
+ <fieldtype name="ignored" stored="false" indexed="false" class="solr.StrField" />
+
+ </types>
+
+
+ <fields>
+ <field name="title" type="string" indexed="true" stored="true"/>
+ <field name="author" type="string" indexed="true" stored="true" />
+ <field name="text" type="text" indexed="true" stored="true" />
+
+ </fields>
+ <!-- field for the QueryParser to use when an explicit fieldname is absent -->
+ <defaultSearchField>text</defaultSearchField>
+
+ <!-- SolrQueryParser configuration: defaultOperator="AND|OR" -->
+ <solrQueryParser defaultOperator="OR"/>
+
+</schema>
Propchange: lucene/dev/trunk/solr/contrib/dataimporthandler/src/extras/test/resources/solr/conf/dataimport-schema-no-unique-key.xml
------------------------------------------------------------------------------
svn:eol-style = native
Modified: lucene/dev/trunk/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/BinContentStreamDataSource.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/BinContentStreamDataSource.java?rev=930161&r1=930160&r2=930161&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/BinContentStreamDataSource.java (original)
+++ lucene/dev/trunk/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/BinContentStreamDataSource.java Fri Apr 2 04:36:13 2010
@@ -1,68 +1,68 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.handler.dataimport;
-
-import org.apache.solr.common.util.ContentStream;
-import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVERE;
-
-import java.io.InputStream;
-import java.io.IOException;
-import java.util.Properties;
-/**
- * <p> A data source implementation which can be used to read binary stream from content streams. </p> <p/> <p> Refer to <a
- * href="http://wiki.apache.org/solr/DataImportHandler">http://wiki.apache.org/solr/DataImportHandler</a> for more
- * details. </p>
- * <p/>
- * <b>This API is experimental and may change in the future.</b>
- *
- * @version $Id$
- * @since solr 1.5
- */
-
-public class BinContentStreamDataSource extends DataSource<InputStream> {
- private ContextImpl context;
- private ContentStream contentStream;
- private InputStream in;
-
-
- public void init(Context context, Properties initProps) {
- this.context = (ContextImpl) context;
- }
-
- public InputStream getData(String query) {
- contentStream = context.getDocBuilder().requestParameters.contentStream;
- if (contentStream == null)
- throw new DataImportHandlerException(SEVERE, "No stream available. The request has no body");
- try {
- return in = contentStream.getStream();
- } catch (IOException e) {
- DataImportHandlerException.wrapAndThrow(SEVERE, e);
- return null;
- }
- }
-
- public void close() {
- if (contentStream != null) {
- try {
- if (in == null) in = contentStream.getStream();
- in.close();
- } catch (IOException e) {
- /*no op*/
- }
- }
- }
-}
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.dataimport;
+
+import org.apache.solr.common.util.ContentStream;
+import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVERE;
+
+import java.io.InputStream;
+import java.io.IOException;
+import java.util.Properties;
+/**
+ * <p> A data source implementation which can be used to read binary stream from content streams. </p> <p/> <p> Refer to <a
+ * href="http://wiki.apache.org/solr/DataImportHandler">http://wiki.apache.org/solr/DataImportHandler</a> for more
+ * details. </p>
+ * <p/>
+ * <b>This API is experimental and may change in the future.</b>
+ *
+ * @version $Id$
+ * @since solr 1.5
+ */
+
+public class BinContentStreamDataSource extends DataSource<InputStream> {
+ private ContextImpl context;
+ private ContentStream contentStream;
+ private InputStream in;
+
+
+ public void init(Context context, Properties initProps) {
+ this.context = (ContextImpl) context;
+ }
+
+ public InputStream getData(String query) {
+ contentStream = context.getDocBuilder().requestParameters.contentStream;
+ if (contentStream == null)
+ throw new DataImportHandlerException(SEVERE, "No stream available. The request has no body");
+ try {
+ return in = contentStream.getStream();
+ } catch (IOException e) {
+ DataImportHandlerException.wrapAndThrow(SEVERE, e);
+ return null;
+ }
+ }
+
+ public void close() {
+ if (contentStream != null) {
+ try {
+ if (in == null) in = contentStream.getStream();
+ in.close();
+ } catch (IOException e) {
+ /*no op*/
+ }
+ }
+ }
+}
Propchange: lucene/dev/trunk/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/BinContentStreamDataSource.java
------------------------------------------------------------------------------
svn:eol-style = native
Modified: lucene/dev/trunk/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/BinFileDataSource.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/BinFileDataSource.java?rev=930161&r1=930160&r2=930161&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/BinFileDataSource.java (original)
+++ lucene/dev/trunk/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/BinFileDataSource.java Fri Apr 2 04:36:13 2010
@@ -1,63 +1,63 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.handler.dataimport;
-
-import static org.apache.solr.handler.dataimport.DataImportHandlerException.wrapAndThrow;
-import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVERE;
-
-import java.io.InputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.util.Properties;
-/**
- * <p>
- * A DataSource which reads from local files
- * </p>
- * <p/>
- * <p>
- * Refer to <a
- * href="http://wiki.apache.org/solr/DataImportHandler">http://wiki.apache.org/solr/DataImportHandler</a>
- * for more details.
- * </p>
- * <p/>
- * <b>This API is experimental and may change in the future.</b>
- *
- * @version $Id$
- * @since solr 1.5
- */
-
-public class BinFileDataSource extends DataSource<InputStream>{
- protected String basePath;
- public void init(Context context, Properties initProps) {
- basePath = initProps.getProperty(FileDataSource.BASE_PATH);
- }
-
- public InputStream getData(String query) {
- File f = FileDataSource.getFile(basePath,query);
- try {
- return new FileInputStream(f);
- } catch (FileNotFoundException e) {
- wrapAndThrow(SEVERE,e,"Unable to open file "+f.getAbsolutePath());
- return null;
- }
- }
-
- public void close() {
-
- }
-}
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.dataimport;
+
+import static org.apache.solr.handler.dataimport.DataImportHandlerException.wrapAndThrow;
+import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVERE;
+
+import java.io.InputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.util.Properties;
+/**
+ * <p>
+ * A DataSource which reads from local files
+ * </p>
+ * <p/>
+ * <p>
+ * Refer to <a
+ * href="http://wiki.apache.org/solr/DataImportHandler">http://wiki.apache.org/solr/DataImportHandler</a>
+ * for more details.
+ * </p>
+ * <p/>
+ * <b>This API is experimental and may change in the future.</b>
+ *
+ * @version $Id$
+ * @since solr 1.5
+ */
+
+public class BinFileDataSource extends DataSource<InputStream>{
+ protected String basePath;
+ public void init(Context context, Properties initProps) {
+ basePath = initProps.getProperty(FileDataSource.BASE_PATH);
+ }
+
+ public InputStream getData(String query) {
+ File f = FileDataSource.getFile(basePath,query);
+ try {
+ return new FileInputStream(f);
+ } catch (FileNotFoundException e) {
+ wrapAndThrow(SEVERE,e,"Unable to open file "+f.getAbsolutePath());
+ return null;
+ }
+ }
+
+ public void close() {
+
+ }
+}
Propchange: lucene/dev/trunk/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/BinFileDataSource.java
------------------------------------------------------------------------------
svn:eol-style = native
Modified: lucene/dev/trunk/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/BinURLDataSource.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/BinURLDataSource.java?rev=930161&r1=930160&r2=930161&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/BinURLDataSource.java (original)
+++ lucene/dev/trunk/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/BinURLDataSource.java Fri Apr 2 04:36:13 2010
@@ -1,101 +1,101 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.handler.dataimport;
-
-import static org.apache.solr.handler.dataimport.DataImportHandlerException.*;
-import static org.apache.solr.handler.dataimport.URLDataSource.*;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.InputStream;
-import java.net.URL;
-import java.net.URLConnection;
-import java.util.Properties;
-/**
- * <p> A data source implementation which can be used to read binary streams using HTTP. </p> <p/> <p> Refer to <a
- * href="http://wiki.apache.org/solr/DataImportHandler">http://wiki.apache.org/solr/DataImportHandler</a> for more
- * details. </p>
- * <p/>
- * <b>This API is experimental and may change in the future.</b>
- *
- * @version $Id$
- * @since solr 1.5
- */
-public class BinURLDataSource extends DataSource<InputStream>{
- private static final Logger LOG = LoggerFactory.getLogger(BinURLDataSource.class);
-
- private String baseUrl;
- private int connectionTimeout = CONNECTION_TIMEOUT;
-
- private int readTimeout = READ_TIMEOUT;
-
- private Context context;
-
- private Properties initProps;
-
- public BinURLDataSource() { }
-
- public void init(Context context, Properties initProps) {
- this.context = context;
- this.initProps = initProps;
-
- baseUrl = getInitPropWithReplacements(BASE_URL);
- String cTimeout = getInitPropWithReplacements(CONNECTION_TIMEOUT_FIELD_NAME);
- String rTimeout = getInitPropWithReplacements(READ_TIMEOUT_FIELD_NAME);
- if (cTimeout != null) {
- try {
- connectionTimeout = Integer.parseInt(cTimeout);
- } catch (NumberFormatException e) {
- LOG.warn("Invalid connection timeout: " + cTimeout);
- }
- }
- if (rTimeout != null) {
- try {
- readTimeout = Integer.parseInt(rTimeout);
- } catch (NumberFormatException e) {
- LOG.warn("Invalid read timeout: " + rTimeout);
- }
- }
- }
-
- public InputStream getData(String query) {
- URL url = null;
- try {
- if (URIMETHOD.matcher(query).find()) url = new URL(query);
- else url = new URL(baseUrl + query);
- LOG.debug("Accessing URL: " + url.toString());
- URLConnection conn = url.openConnection();
- conn.setConnectTimeout(connectionTimeout);
- conn.setReadTimeout(readTimeout);
- return conn.getInputStream();
- } catch (Exception e) {
- LOG.error("Exception thrown while getting data", e);
- wrapAndThrow (SEVERE, e, "Exception in invoking url " + url);
- return null;//unreachable
- }
- }
-
- public void close() { }
-
- private String getInitPropWithReplacements(String propertyName) {
- final String expr = initProps.getProperty(propertyName);
- if (expr == null) {
- return null;
- }
- return context.replaceTokens(expr);
- }
-}
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.dataimport;
+
+import static org.apache.solr.handler.dataimport.DataImportHandlerException.*;
+import static org.apache.solr.handler.dataimport.URLDataSource.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.InputStream;
+import java.net.URL;
+import java.net.URLConnection;
+import java.util.Properties;
+/**
+ * <p> A data source implementation which can be used to read binary streams using HTTP. </p> <p/> <p> Refer to <a
+ * href="http://wiki.apache.org/solr/DataImportHandler">http://wiki.apache.org/solr/DataImportHandler</a> for more
+ * details. </p>
+ * <p/>
+ * <b>This API is experimental and may change in the future.</b>
+ *
+ * @version $Id$
+ * @since solr 1.5
+ */
+public class BinURLDataSource extends DataSource<InputStream>{
+ private static final Logger LOG = LoggerFactory.getLogger(BinURLDataSource.class);
+
+ private String baseUrl;
+ private int connectionTimeout = CONNECTION_TIMEOUT;
+
+ private int readTimeout = READ_TIMEOUT;
+
+ private Context context;
+
+ private Properties initProps;
+
+ public BinURLDataSource() { }
+
+ public void init(Context context, Properties initProps) {
+ this.context = context;
+ this.initProps = initProps;
+
+ baseUrl = getInitPropWithReplacements(BASE_URL);
+ String cTimeout = getInitPropWithReplacements(CONNECTION_TIMEOUT_FIELD_NAME);
+ String rTimeout = getInitPropWithReplacements(READ_TIMEOUT_FIELD_NAME);
+ if (cTimeout != null) {
+ try {
+ connectionTimeout = Integer.parseInt(cTimeout);
+ } catch (NumberFormatException e) {
+ LOG.warn("Invalid connection timeout: " + cTimeout);
+ }
+ }
+ if (rTimeout != null) {
+ try {
+ readTimeout = Integer.parseInt(rTimeout);
+ } catch (NumberFormatException e) {
+ LOG.warn("Invalid read timeout: " + rTimeout);
+ }
+ }
+ }
+
+ public InputStream getData(String query) {
+ URL url = null;
+ try {
+ if (URIMETHOD.matcher(query).find()) url = new URL(query);
+ else url = new URL(baseUrl + query);
+ LOG.debug("Accessing URL: " + url.toString());
+ URLConnection conn = url.openConnection();
+ conn.setConnectTimeout(connectionTimeout);
+ conn.setReadTimeout(readTimeout);
+ return conn.getInputStream();
+ } catch (Exception e) {
+ LOG.error("Exception thrown while getting data", e);
+ wrapAndThrow (SEVERE, e, "Exception in invoking url " + url);
+ return null;//unreachable
+ }
+ }
+
+ public void close() { }
+
+ private String getInitPropWithReplacements(String propertyName) {
+ final String expr = initProps.getProperty(propertyName);
+ if (expr == null) {
+ return null;
+ }
+ return context.replaceTokens(expr);
+ }
+}
Propchange: lucene/dev/trunk/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/BinURLDataSource.java
------------------------------------------------------------------------------
svn:eol-style = native
Modified: lucene/dev/trunk/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/FieldStreamDataSource.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/FieldStreamDataSource.java?rev=930161&r1=930160&r2=930161&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/FieldStreamDataSource.java (original)
+++ lucene/dev/trunk/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/FieldStreamDataSource.java Fri Apr 2 04:36:13 2010
@@ -1,95 +1,95 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.handler.dataimport;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.ByteArrayInputStream;
-import java.io.InputStream;
-import java.lang.reflect.Method;
-import java.lang.reflect.Modifier;
-import java.sql.Blob;
-import java.util.Properties;
-
-import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVERE;
-
-
-/**
- * This can be useful for users who have a DB field containing BLOBs which may be Rich documents
- * <p/>
- * The datasouce may be configured as follows
- * <p/>
- * <datasource name="f1" type="FieldStreamDataSource" />
- * <p/>
- * The enity which uses this datasource must keep and attribute dataField
- * <p/>
- * The fieldname must be resolvable from VariableResolver
- * <p/>
- * This may be used with any EntityProcessor which uses a DataSource<InputStream> eg:TikaEntityProcessor
- * <p/>
- *
- * @version $Id$
- * @since 1.5
- */
-public class FieldStreamDataSource extends DataSource<InputStream> {
- private static final Logger LOG = LoggerFactory.getLogger(FieldReaderDataSource.class);
- protected VariableResolver vr;
- protected String dataField;
- private EntityProcessorWrapper wrapper;
-
- public void init(Context context, Properties initProps) {
- dataField = context.getEntityAttribute("dataField");
- wrapper = (EntityProcessorWrapper) context.getEntityProcessor();
- /*no op*/
- }
-
- public InputStream getData(String query) {
- Object o = wrapper.getVariableResolver().resolve(dataField);
- if (o == null) {
- throw new DataImportHandlerException(SEVERE, "No field available for name : " + dataField);
- }
- if (o instanceof Blob) {
- Blob blob = (Blob) o;
- try {
- //Most of the JDBC drivers have getBinaryStream defined as public
- // so let us just check it
- Method m = blob.getClass().getDeclaredMethod("getBinaryStream");
- if (Modifier.isPublic(m.getModifiers())) {
- return (InputStream) m.invoke(blob);
- } else {
- // force invoke
- m.setAccessible(true);
- return (InputStream) m.invoke(blob);
- }
- } catch (Exception e) {
- LOG.info("Unable to get data from BLOB");
- return null;
-
- }
- } else if (o instanceof byte[]) {
- byte[] bytes = (byte[]) o;
- return new ByteArrayInputStream(bytes);
- } else {
- throw new RuntimeException("unsupported type : " + o.getClass());
- }
-
- }
-
- public void close() {
- }
-}
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.dataimport;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.ByteArrayInputStream;
+import java.io.InputStream;
+import java.lang.reflect.Method;
+import java.lang.reflect.Modifier;
+import java.sql.Blob;
+import java.util.Properties;
+
+import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVERE;
+
+
+/**
+ * This can be useful for users who have a DB field containing BLOBs which may be Rich documents
+ * <p/>
+ * The datasouce may be configured as follows
+ * <p/>
+ * <datasource name="f1" type="FieldStreamDataSource" />
+ * <p/>
+ * The enity which uses this datasource must keep and attribute dataField
+ * <p/>
+ * The fieldname must be resolvable from VariableResolver
+ * <p/>
+ * This may be used with any EntityProcessor which uses a DataSource<InputStream> eg:TikaEntityProcessor
+ * <p/>
+ *
+ * @version $Id$
+ * @since 1.5
+ */
+public class FieldStreamDataSource extends DataSource<InputStream> {
+ private static final Logger LOG = LoggerFactory.getLogger(FieldReaderDataSource.class);
+ protected VariableResolver vr;
+ protected String dataField;
+ private EntityProcessorWrapper wrapper;
+
+ public void init(Context context, Properties initProps) {
+ dataField = context.getEntityAttribute("dataField");
+ wrapper = (EntityProcessorWrapper) context.getEntityProcessor();
+ /*no op*/
+ }
+
+ public InputStream getData(String query) {
+ Object o = wrapper.getVariableResolver().resolve(dataField);
+ if (o == null) {
+ throw new DataImportHandlerException(SEVERE, "No field available for name : " + dataField);
+ }
+ if (o instanceof Blob) {
+ Blob blob = (Blob) o;
+ try {
+ //Most of the JDBC drivers have getBinaryStream defined as public
+ // so let us just check it
+ Method m = blob.getClass().getDeclaredMethod("getBinaryStream");
+ if (Modifier.isPublic(m.getModifiers())) {
+ return (InputStream) m.invoke(blob);
+ } else {
+ // force invoke
+ m.setAccessible(true);
+ return (InputStream) m.invoke(blob);
+ }
+ } catch (Exception e) {
+ LOG.info("Unable to get data from BLOB");
+ return null;
+
+ }
+ } else if (o instanceof byte[]) {
+ byte[] bytes = (byte[]) o;
+ return new ByteArrayInputStream(bytes);
+ } else {
+ throw new RuntimeException("unsupported type : " + o.getClass());
+ }
+
+ }
+
+ public void close() {
+ }
+}
Propchange: lucene/dev/trunk/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/FieldStreamDataSource.java
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/ThreadedContext.java
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/ThreadedEntityProcessorWrapper.java
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/MockInitialContextFactory.java
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSqlEntityProcessorDelta.java
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSqlEntityProcessorDelta2.java
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestThreaded.java
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestURLDataSource.java
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/resources/solr/conf/dataimport-solr_id-schema.xml
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/extraction/solr-cell-pom.xml.template
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/velocity/build.xml
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/velocity/solr-velocity-pom.xml.template
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/velocity/src/main/java/footer.vm
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/velocity/src/main/java/org/apache/solr/response/PageTool.java
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/velocity/src/main/java/org/apache/solr/response/SolrParamResourceLoader.java
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/velocity/src/main/java/org/apache/solr/response/SolrVelocityResourceLoader.java
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/velocity/src/main/java/org/apache/solr/response/VelocityResponseWriter.java
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/velocity/src/main/solr/conf/admin-extra.html
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/velocity/src/main/solr/conf/elevate.xml
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/velocity/src/main/solr/conf/protwords.txt
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/velocity/src/main/solr/conf/schema.xml
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/velocity/src/main/solr/conf/scripts.conf
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/velocity/src/main/solr/conf/solrconfig.xml
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/velocity/src/main/solr/conf/spellings.txt
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/velocity/src/main/solr/conf/stopwords.txt
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/velocity/src/main/solr/conf/synonyms.txt
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/velocity/src/main/solr/conf/velocity/VM_global_library.vm
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/velocity/src/main/solr/conf/velocity/browse.vm
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/velocity/src/main/solr/conf/velocity/debug.vm
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/velocity/src/main/solr/conf/velocity/header.vm
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/velocity/src/main/solr/conf/velocity/hit.vm
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/velocity/src/main/solr/conf/velocity/layout.vm
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/velocity/src/main/solr/conf/velocity/main.css
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/contrib/velocity/src/test/org/apache/solr/velocity/VelocityResponseWriterTest.java
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/example/README.txt
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/example/etc/webdefault.xml
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/example/example-DIH/solr/db/conf/scripts.conf
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/dev/trunk/solr/example/example-DIH/solr/rss/conf/scripts.conf
------------------------------------------------------------------------------
svn:eol-style = native