Posted to commits@lucene.apache.org by us...@apache.org on 2012/08/09 12:21:07 UTC

svn commit: r1371142 [28/32] - in /lucene/dev/branches/lucene3312: ./ dev-tools/ dev-tools/eclipse/ dev-tools/maven/ dev-tools/maven/lucene/ dev-tools/maven/lucene/analysis/common/ dev-tools/maven/lucene/analysis/icu/ dev-tools/maven/lucene/analysis/ku...

Modified: lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/index/AssertingAtomicReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/index/AssertingAtomicReader.java?rev=1371142&r1=1371141&r2=1371142&view=diff
==============================================================================
--- lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/index/AssertingAtomicReader.java (original)
+++ lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/index/AssertingAtomicReader.java Thu Aug  9 10:20:53 2012
@@ -125,7 +125,7 @@ public class AssertingAtomicReader exten
     }
 
     @Override
-    public DocsEnum docs(Bits liveDocs, DocsEnum reuse, boolean needsFreqs) throws IOException {
+    public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
       assert state == State.POSITIONED: "docs(...) called on unpositioned TermsEnum";
 
       // TODO: should we give this thing a random to be super-evil,
@@ -133,12 +133,12 @@ public class AssertingAtomicReader exten
       if (reuse instanceof AssertingDocsEnum) {
         reuse = ((AssertingDocsEnum) reuse).in;
       }
-      DocsEnum docs = super.docs(liveDocs, reuse, needsFreqs);
+      DocsEnum docs = super.docs(liveDocs, reuse, flags);
       return docs == null ? null : new AssertingDocsEnum(docs);
     }
 
     @Override
-    public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse, boolean needsOffsets) throws IOException {
+    public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse, int flags) throws IOException {
       assert state == State.POSITIONED: "docsAndPositions(...) called on unpositioned TermsEnum";
 
       // TODO: should we give this thing a random to be super-evil,
@@ -146,7 +146,7 @@ public class AssertingAtomicReader exten
       if (reuse instanceof AssertingDocsAndPositionsEnum) {
         reuse = ((AssertingDocsAndPositionsEnum) reuse).in;
       }
-      DocsAndPositionsEnum docs = super.docsAndPositions(liveDocs, reuse, needsOffsets);
+      DocsAndPositionsEnum docs = super.docsAndPositions(liveDocs, reuse, flags);
       return docs == null ? null : new AssertingDocsAndPositionsEnum(docs);
     }
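
The change above replaces the needsFreqs/needsOffsets booleans with an int flags argument on both postings factories. A minimal caller-side sketch, using only the flag constants that appear elsewhere in this commit (the helper class and method names are illustrative, not part of the patch):

    import java.io.IOException;

    import org.apache.lucene.index.DocsAndPositionsEnum;
    import org.apache.lucene.index.DocsEnum;
    import org.apache.lucene.index.TermsEnum;
    import org.apache.lucene.util.Bits;

    class FlagsExample {
      // Docs plus term frequencies: the old needsFreqs boolean becomes a bit flag.
      static DocsEnum docsWithFreqs(TermsEnum te, Bits liveDocs) throws IOException {
        return te.docs(liveDocs, null, DocsEnum.FLAG_FREQS);
      }

      // Positions enum that also carries offsets and payloads, combined with |.
      static DocsAndPositionsEnum docsWithOffsetsAndPayloads(TermsEnum te, Bits liveDocs)
          throws IOException {
        return te.docsAndPositions(liveDocs, null,
            DocsAndPositionsEnum.FLAG_OFFSETS | DocsAndPositionsEnum.FLAG_PAYLOADS);
      }
    }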
 

Modified: lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/index/RandomCodec.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/index/RandomCodec.java?rev=1371142&r1=1371141&r2=1371142&view=diff
==============================================================================
--- lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/index/RandomCodec.java (original)
+++ lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/index/RandomCodec.java Thu Aug  9 10:20:53 2012
@@ -29,6 +29,7 @@ import java.util.Set;
 
 import org.apache.lucene.codecs.PostingsFormat;
 import org.apache.lucene.codecs.asserting.AssertingPostingsFormat;
+import org.apache.lucene.codecs.bloom.TestBloomFilteredLucene40Postings;
 import org.apache.lucene.codecs.lucene40.Lucene40Codec;
 import org.apache.lucene.codecs.lucene40.Lucene40PostingsFormat;
 import org.apache.lucene.codecs.lucene40ords.Lucene40WithOrds;
@@ -98,6 +99,10 @@ public class RandomCodec extends Lucene4
         new Pulsing40PostingsFormat(1 + random.nextInt(20), minItemsPerBlock, maxItemsPerBlock),
         // add pulsing again with (usually) different parameters
         new Pulsing40PostingsFormat(1 + random.nextInt(20), minItemsPerBlock, maxItemsPerBlock),
+        //TODO as a PostingsFormat which wraps others, we should allow TestBloomFilteredLucene40Postings to be constructed 
+        //with a choice of concrete PostingsFormats. Maybe useful to have a generic means of marking and dealing 
+        //with such "wrapper" classes?
+        new TestBloomFilteredLucene40Postings(),                
         new MockSepPostingsFormat(),
         new MockFixedIntBlockPostingsFormat(_TestUtil.nextInt(random, 1, 2000)),
         new MockVariableIntBlockPostingsFormat( _TestUtil.nextInt(random, 1, 127)),
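
The TODO above asks for a generic way to build this as a wrapper around an arbitrary concrete PostingsFormat. A hedged sketch of what such a delegator might look like (the class is hypothetical and not part of this commit; it only forwards the two abstract PostingsFormat hooks):

    import java.io.IOException;

    import org.apache.lucene.codecs.FieldsConsumer;
    import org.apache.lucene.codecs.FieldsProducer;
    import org.apache.lucene.codecs.PostingsFormat;
    import org.apache.lucene.index.SegmentReadState;
    import org.apache.lucene.index.SegmentWriteState;

    // Hypothetical generic "wrapper" format: write and read paths both delegate.
    class DelegatingPostingsFormat extends PostingsFormat {
      private final PostingsFormat delegate;

      DelegatingPostingsFormat(String name, PostingsFormat delegate) {
        super(name);
        this.delegate = delegate;
      }

      @Override
      public FieldsConsumer fieldsConsumer(SegmentWriteState state) throws IOException {
        return delegate.fieldsConsumer(state);
      }

      @Override
      public FieldsProducer fieldsProducer(SegmentReadState state) throws IOException {
        return delegate.fieldsProducer(state);
      }
    }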

Modified: lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/search/RandomSimilarityProvider.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/search/RandomSimilarityProvider.java?rev=1371142&r1=1371141&r2=1371142&view=diff
==============================================================================
--- lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/search/RandomSimilarityProvider.java (original)
+++ lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/search/RandomSimilarityProvider.java Thu Aug  9 10:20:53 2012
@@ -67,12 +67,12 @@ public class RandomSimilarityProvider ex
   final List<Similarity> knownSims;
   Map<String,Similarity> previousMappings = new HashMap<String,Similarity>();
   final int perFieldSeed;
-  final boolean shouldCoord;
+  final int coordType; // 0 = no coord, 1 = coord, 2 = crazy coord
   final boolean shouldQueryNorm;
   
   public RandomSimilarityProvider(Random random) {
     perFieldSeed = random.nextInt();
-    shouldCoord = random.nextBoolean();
+    coordType = random.nextInt(3);
     shouldQueryNorm = random.nextBoolean();
     knownSims = new ArrayList<Similarity>(allSims);
     Collections.shuffle(knownSims, random);
@@ -80,10 +80,12 @@ public class RandomSimilarityProvider ex
   
   @Override
   public float coord(int overlap, int maxOverlap) {
-    if (shouldCoord) {
+    if (coordType == 0) {
+      return 1.0f;
+    } else if (coordType == 1) {
       return defaultSim.coord(overlap, maxOverlap);
     } else {
-      return 1.0f;
+      return overlap / ((float)maxOverlap + 1);
     }
   }
   
@@ -161,6 +163,14 @@ public class RandomSimilarityProvider ex
   
   @Override
   public synchronized String toString() {
-    return "RandomSimilarityProvider(queryNorm=" + shouldQueryNorm + ",coord=" + shouldCoord + "): " + previousMappings.toString();
+    final String coordMethod;
+    if (coordType == 0) {
+      coordMethod = "no";
+    } else if (coordType == 1) {
+      coordMethod = "yes";
+    } else {
+      coordMethod = "crazy";
+    }
+    return "RandomSimilarityProvider(queryNorm=" + shouldQueryNorm + ",coord=" + coordMethod + "): " + previousMappings.toString();
   }
 }
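
To make the three coord modes concrete: for a BooleanQuery with maxOverlap = 3 clauses, all of which match (overlap = 3), and assuming defaultSim uses the standard overlap/maxOverlap coord, the values work out as follows:

    // coordType 0: 1.0f                      (coord disabled)
    // coordType 1: defaultSim.coord(3, 3)    (standard overlap/maxOverlap = 1.0f)
    // coordType 2: 3 / (3f + 1) = 0.75f      ("crazy" coord; never reaches 1.0)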

Modified: lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/search/ShardSearchingTestBase.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/search/ShardSearchingTestBase.java?rev=1371142&r1=1371141&r2=1371142&view=diff
==============================================================================
--- lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/search/ShardSearchingTestBase.java (original)
+++ lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/search/ShardSearchingTestBase.java Thu Aug  9 10:20:53 2012
@@ -18,7 +18,6 @@ package org.apache.lucene.search;
  */
 
 import java.io.Closeable;
-import java.io.File;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -27,15 +26,17 @@ import java.util.Random;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 
+import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermContext;
-import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.LineFileDocs;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.PrintStreamInfoStream;
+import org.apache.lucene.util._TestUtil;
 
 // TODO
 //   - doc blocks?  so we can test joins/grouping...
@@ -423,11 +424,16 @@ public abstract class ShardSearchingTest
 
     private volatile ShardIndexSearcher currentShardSearcher;
 
-    public NodeState(Random random, String baseDir, int nodeID, int numNodes) throws IOException {
+    public NodeState(Random random, int nodeID, int numNodes) throws IOException {
       myNodeID = nodeID;
-      dir = newFSDirectory(new File(baseDir + "." + myNodeID));
+      dir = newFSDirectory(_TestUtil.getTempDir("ShardSearchingTestBase"));
       // TODO: set warmer
-      writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+      IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
+      iwc.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
+      if (VERBOSE) {
+        iwc.setInfoStream(new PrintStreamInfoStream(System.out));
+      }
+      writer = new IndexWriter(dir, iwc);
       mgr = new SearcherManager(writer, true, null);
       searchers = new SearcherLifetimeManager();
 
@@ -556,14 +562,14 @@ public abstract class ShardSearchingTest
   long endTimeNanos;
   private Thread changeIndicesThread;
 
-  protected void start(String baseDirName, int numNodes, double runTimeSec, int maxSearcherAgeSeconds) throws IOException {
+  protected void start(int numNodes, double runTimeSec, int maxSearcherAgeSeconds) throws IOException {
 
     endTimeNanos = System.nanoTime() + (long) (runTimeSec*1000000000);
     this.maxSearcherAgeSeconds = maxSearcherAgeSeconds;
 
     nodes = new NodeState[numNodes];
     for(int nodeID=0;nodeID<numNodes;nodeID++) {
-      nodes[nodeID] = new NodeState(random(), baseDirName, nodeID, numNodes);
+      nodes[nodeID] = new NodeState(random(), nodeID, numNodes);
     }
 
     long[] nodeVersions = new long[nodes.length];
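
With the baseDir parameter removed, a concrete test no longer supplies a directory at all; each NodeState allocates its own temp dir via _TestUtil.getTempDir. A hypothetical subclass, shown only to illustrate the new start(...) signature:

    import org.apache.lucene.search.ShardSearchingTestBase;

    public class TestSimpleSharding extends ShardSearchingTestBase {
      public void testBasic() throws Exception {
        // numNodes, runTimeSec, maxSearcherAgeSeconds -- no base directory argument.
        start(3, 2.0, 10);
      }
    }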

Modified: lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/store/BaseDirectoryWrapper.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/store/BaseDirectoryWrapper.java?rev=1371142&r1=1371141&r2=1371142&view=diff
==============================================================================
--- lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/store/BaseDirectoryWrapper.java (original)
+++ lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/store/BaseDirectoryWrapper.java Thu Aug  9 10:20:53 2012
@@ -31,8 +31,6 @@ import org.apache.lucene.util._TestUtil;
 public class BaseDirectoryWrapper extends Directory {
   /** our in directory */
   protected final Directory delegate;
-  /** best effort: base on in Directory is volatile */
-  protected boolean open;
   
   private boolean checkIndexOnClose = true;
   private boolean crossCheckTermVectorsOnClose = true;
@@ -43,7 +41,7 @@ public class BaseDirectoryWrapper extend
 
   @Override
   public void close() throws IOException {
-    open = false;
+    isOpen = false;
     if (checkIndexOnClose && indexPossiblyExists()) {
       _TestUtil.checkIndex(this, crossCheckTermVectorsOnClose);
     }
@@ -51,7 +49,7 @@ public class BaseDirectoryWrapper extend
   }
   
   public boolean isOpen() {
-    return open;
+    return isOpen;
   }
   
   /** 

Modified: lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java?rev=1371142&r1=1371141&r2=1371142&view=diff
==============================================================================
--- lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java (original)
+++ lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java Thu Aug  9 10:20:53 2012
@@ -551,7 +551,7 @@ public class MockDirectoryWrapper extend
     if (noDeleteOpenFile && openLocks.size() > 0) {
       throw new RuntimeException("MockDirectoryWrapper: cannot close: there are still open locks: " + openLocks);
     }
-    open = false;
+    isOpen = false;
     if (getCheckIndexOnClose()) {
       if (indexPossiblyExists()) {
         if (LuceneTestCase.VERBOSE) {
@@ -614,11 +614,6 @@ public class MockDirectoryWrapper extend
   public synchronized void removeIndexInput(IndexInput in, String name) {
     removeOpenFile(in, name);
   }
-
-  @Override
-  public synchronized boolean isOpen() {
-    return open;
-  }
   
   /**
    * Objects that represent fail-able conditions. Objects of a derived

Modified: lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/util/_TestUtil.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/util/_TestUtil.java?rev=1371142&r1=1371141&r2=1371142&view=diff
==============================================================================
--- lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/util/_TestUtil.java (original)
+++ lucene/dev/branches/lucene3312/lucene/test-framework/src/java/org/apache/lucene/util/_TestUtil.java Thu Aug  9 10:20:53 2012
@@ -857,7 +857,7 @@ public class _TestUtil {
   // Returns a DocsEnum, but randomly sometimes uses a
   // DocsAndFreqsEnum, DocsAndPositionsEnum.  Returns null
   // if field/term doesn't exist:
-  public static DocsEnum docs(Random random, IndexReader r, String field, BytesRef term, Bits liveDocs, DocsEnum reuse, boolean needsFreqs) throws IOException {
+  public static DocsEnum docs(Random random, IndexReader r, String field, BytesRef term, Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
     final Terms terms = MultiFields.getTerms(r, field);
     if (terms == null) {
       return null;
@@ -866,45 +866,30 @@ public class _TestUtil {
     if (!termsEnum.seekExact(term, random.nextBoolean())) {
       return null;
     }
-    if (random.nextBoolean()) {
-      if (random.nextBoolean()) {
-        // TODO: cast re-use to D&PE if we can...?
-        DocsAndPositionsEnum docsAndPositions = termsEnum.docsAndPositions(liveDocs, null, true);
-        if (docsAndPositions == null) {
-          docsAndPositions = termsEnum.docsAndPositions(liveDocs, null, false);
-        }
-        if (docsAndPositions != null) {
-          return docsAndPositions;
-        }
-      }
-      final DocsEnum docsAndFreqs = termsEnum.docs(liveDocs, reuse, true);
-      if (docsAndFreqs != null) {
-        return docsAndFreqs;
-      }
-    }
-    return termsEnum.docs(liveDocs, reuse, needsFreqs);
+    return docs(random, termsEnum, liveDocs, reuse, flags);
   }
 
   // Returns a DocsEnum from a positioned TermsEnum, but
   // randomly sometimes uses a DocsAndFreqsEnum, DocsAndPositionsEnum.
-  public static DocsEnum docs(Random random, TermsEnum termsEnum, Bits liveDocs, DocsEnum reuse, boolean needsFreqs) throws IOException {
+  public static DocsEnum docs(Random random, TermsEnum termsEnum, Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
     if (random.nextBoolean()) {
       if (random.nextBoolean()) {
-        // TODO: cast re-use to D&PE if we can...?
-        DocsAndPositionsEnum docsAndPositions = termsEnum.docsAndPositions(liveDocs, null, true);
-        if (docsAndPositions == null) {
-          docsAndPositions = termsEnum.docsAndPositions(liveDocs, null, false);
+        final int posFlags;
+        switch (random.nextInt(4)) {
+          case 0: posFlags = 0; break;
+          case 1: posFlags = DocsAndPositionsEnum.FLAG_OFFSETS; break;
+          case 2: posFlags = DocsAndPositionsEnum.FLAG_PAYLOADS; break;
+          default: posFlags = DocsAndPositionsEnum.FLAG_OFFSETS | DocsAndPositionsEnum.FLAG_PAYLOADS; break;
         }
+        // TODO: cast to DocsAndPositionsEnum?
+        DocsAndPositionsEnum docsAndPositions = termsEnum.docsAndPositions(liveDocs, null, posFlags);
         if (docsAndPositions != null) {
           return docsAndPositions;
         }
       }
-      final DocsEnum docsAndFreqs = termsEnum.docs(liveDocs, null, true);
-      if (docsAndFreqs != null) {
-        return docsAndFreqs;
-      }
+      flags |= DocsEnum.FLAG_FREQS;
     }
-    return termsEnum.docs(liveDocs, null, needsFreqs);
+    return termsEnum.docs(liveDocs, reuse, flags);
   }
   
   public static CharSequence stringToCharSequence(String string, Random random) {
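
Callers of the test helper now pass the same int flags straight through; a small sketch (the field name "body" and term "lucene" are placeholders, not taken from the patch):

    import java.io.IOException;
    import java.util.Random;

    import org.apache.lucene.index.DocsEnum;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.util.BytesRef;
    import org.apache.lucene.util._TestUtil;

    class DocsHelperExample {
      // Ask for freqs explicitly; per the code above, the helper may still randomly
      // upgrade the request to a positions enum.
      static DocsEnum freqs(Random random, IndexReader reader) throws IOException {
        return _TestUtil.docs(random, reader, "body", new BytesRef("lucene"),
            null /* liveDocs */, null /* reuse */, DocsEnum.FLAG_FREQS);
      }
    }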

Modified: lucene/dev/branches/lucene3312/lucene/test-framework/src/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3312/lucene/test-framework/src/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat?rev=1371142&r1=1371141&r2=1371142&view=diff
==============================================================================
--- lucene/dev/branches/lucene3312/lucene/test-framework/src/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat (original)
+++ lucene/dev/branches/lucene3312/lucene/test-framework/src/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat Thu Aug  9 10:20:53 2012
@@ -20,5 +20,6 @@ org.apache.lucene.codecs.mocksep.MockSep
 org.apache.lucene.codecs.nestedpulsing.NestedPulsingPostingsFormat
 org.apache.lucene.codecs.ramonly.RAMOnlyPostingsFormat
 org.apache.lucene.codecs.lucene40ords.Lucene40WithOrds
+org.apache.lucene.codecs.bloom.TestBloomFilteredLucene40Postings
 org.apache.lucene.codecs.asserting.AssertingPostingsFormat
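
Listing the class in the services file lets the test framework resolve it by name through Lucene's codec SPI. A minimal lookup sketch, assuming the format registers itself under a name matching its simple class name (the real name is whatever the format declares in its constructor):

    import org.apache.lucene.codecs.PostingsFormat;

    class BloomFormatLookup {
      static PostingsFormat resolve() {
        // Assumed registered name; the format itself decides the actual one.
        return PostingsFormat.forName("TestBloomFilteredLucene40Postings");
      }
    }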
 

Modified: lucene/dev/branches/lucene3312/solr/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3312/solr/CHANGES.txt?rev=1371142&r1=1371141&r2=1371142&view=diff
==============================================================================
--- lucene/dev/branches/lucene3312/solr/CHANGES.txt (original)
+++ lucene/dev/branches/lucene3312/solr/CHANGES.txt Thu Aug  9 10:20:53 2012
@@ -24,15 +24,42 @@ $Id$
 
 (No changes)
 
+==================  4.0.0 ==================
+
+Upgrading from Solr 4.0.0-BETA
+----------------------
+
+In order to better support distributed search mode, the TermVectorComponent's response format has been changed so that if the schema defines a uniqueKeyField, then that field value is used as the "key" for each document in its response section, instead of the internal lucene doc id.  Users w/o a uniqueKeyField will continue to see the same response format.  See SOLR-3229 for more details.
+
+Bug Fixes
+----------------------
+
+* SOLR-3685: Solr Cloud sometimes skipped peersync attempt and replicated instead due
+  to tlog flags not being cleared when no updates were buffered during a previous
+  replication.  (Markus Jelsma, Mark Miller, yonik)
+
+* SOLR-3229: Fixed TermVectorComponent to work with distributed search
+  (Hang Xie, hossman)
+
 ==================  4.0.0-BETA ===================
 
+
 Versions of Major Components
 ---------------------
 Apache Tika 1.1
 Carrot2 3.5.0
 Velocity 1.6.4 and Velocity Tools 2.0
 Apache UIMA 2.3.1
-Apache ZooKeeper 3.3.5
+Apache ZooKeeper 3.3.6
+
+Upgrading from Solr 4.0.0-ALPHA
+----------------------
+
+Solr is now much more strict about requiring that the uniqueKeyField feature 
+(if used) must refer to a field which is not multiValued.  If you upgrade from 
+an earlier version of Solr and see an error that your uniqueKeyField "can not 
+be configured to be multivalued" please add 'multiValued="false"' to the 
+<field /> declaration for your uniqueKeyField.  See SOLR-3682 for more details.
 
 Detailed Change List
 ----------------------
@@ -88,14 +115,30 @@ New Features
   lib folder. The factories are automatically made available with SPI.
   (Chris Male, Robert Muir, Uwe Schindler)
 
-* SOLR-3634: CoreContainer and CoreAdminHandler will now remember and report 
-  back information about failures to initialize SolrCores.  These failures will 
-  be accessible from the STATUS command until they are "reset" by 
-  creating/renaming a SolrCore with the same name.  (hossman)
+* SOLR-3634, SOLR-3635: CoreContainer and CoreAdminHandler will now remember 
+  and report back information about failures to initialize SolrCores.  These 
+  failures will be accessible from the web UI and CoreAdminHandler STATUS 
+  command until they are "reset" by creating/renaming a SolrCore with the 
+  same name.  (hossman, steffkes)
 
 * SOLR-1280: Added commented-out example of the new script update processor
   to the example configuration.  See http://wiki.apache.org/solr/ScriptUpdateProcessor (ehatcher)
 
+* SOLR-3672: SimplePostTool: Improvements for posting files
+  Support for auto mode, recursive and wildcards (janhoy)
+
+Optimizations
+----------------------
+
+* SOLR-3708: Add hashCode to ClusterState so that structures built based on the 
+  ClusterState can be easily cached. (Mark Miller)
+
+* SOLR-3709: Cache the url list created from the ClusterState in CloudSolrServer on each 
+  request. (Mark Miller, yonik)
+  
+* SOLR-3710: Change CloudSolrServer so that update requests are only sent to leaders by 
+  default. (Mark Miller)
+
 Bug Fixes
 ----------------------
 
@@ -121,7 +164,7 @@ Bug Fixes
 * SOLR-1781: Replication index directories not always cleaned up. 
   (Markus Jelsma, Terje Sten Bjerkseth, Mark Miller)
 
-* SOLR-3639: Update ZooKeeper to 3.3.5 for a variety of bug fixes. (Mark Miller)
+* SOLR-3639: Update ZooKeeper to 3.3.6 for a variety of bug fixes. (Mark Miller)
 
 * SOLR-3629: Typo in solr.xml persistence when overriding the solrconfig.xml 
   file name using the "config" attribute prevented the override file from being
@@ -153,6 +196,20 @@ Bug Fixes
   when 'gap' is zero -- or effectively zero due to floating point arithmetic 
   underflow. (hossman)
 
+* SOLR-3648: Fixed VelocityResponseWriter template loading in SolrCloud mode.
+  For the example configuration, this means /browse now works with SolrCloud.
+  (janhoy, ehatcher)
+
+* SOLR-3677: Fixed misleading error message in web ui to distinguish between 
+  no SolrCores loaded vs. no /admin/ handler available.
+  (hossman, steffkes)
+  
+* SOLR-3428: SolrCmdDistributor flushAdds/flushDeletes can cause repeated 
+  adds/deletes to be sent (Mark Miller, Per Steffensen)
+  
+* SOLR-3647: DistributedQueue should use our Solr zk client rather than the std zk 
+  client. ZooKeeper expiration can be permanent otherwise. (Mark Miller)
+
 Other Changes
 ----------------------
 
@@ -180,7 +237,26 @@ Other Changes
 
 * SOLR-3215: Clone SolrInputDocument when distrib indexing so that update processors after
   the distrib update process do not process the document twice. (Mark Miller)
+
+* SOLR-3683: Improved error handling if an <analyzer> contains both an 
+  explicit class attribute, as well as nested factories. (hossman)
+
+* SOLR-3682: Fail to parse schema.xml if uniqueKeyField is multivalued (hossman)
+
+* SOLR-2115: DIH no longer requires the "config" parameter to be specified in solrconfig.xml.
+  Instead, the configuration is loaded and parsed with every import.  This allows the use of
+  a different configuration with each import, and makes correcting configuration errors simpler.
+  Also, the configuration itself can be passed using the "dataConfig" parameter rather than
+  using a file (this previously worked in debug mode only).  When configuration errors are 
+  encountered, the error message is returned in XML format.  (James Dyer)
+  
+* SOLR-3439: Make SolrCell easier to use out of the box. Also improves "/browse" to display
+  rich-text documents correctly, along with facets for author and content_type.  
+  With the new "content" field, highlighting of body is supported. See also SOLR-3672 for
+  easier posting of a whole directory structure. (Jack Krupansky, janhoy)
   
+* SOLR-3579: SolrCloud view should default to the graph view rather than tree view.
+  (steffkes, Mark Miller)
 
 ==================  4.0.0-ALPHA ==================
 More information about this release, including any errata related to the 
@@ -529,6 +605,11 @@ New Features
 * SOLR-3542: Add WeightedFragListBuilder for FVH and set it to default fragListBuilder
   in example solrconfig.xml. (Sebastian Lutze, koji)
 
+* SOLR-2396: Add ICUCollationField to contrib/analysis-extras, which is much 
+  more efficient than the Solr 3.x ICUCollationKeyFilterFactory, and also 
+  supports Locale-sensitive range queries.  (rmuir)
+
+
 Optimizations
 ----------------------
 
@@ -676,6 +757,17 @@ Bug Fixes
   the hashCode implementation of {!bbox} and {!geofilt} queries.
   (hossman)
 
+* SOLR-3470: contrib/clustering: custom Carrot2 tokenizer and stemmer factories
+  are respected now (Stanislaw Osinski, Dawid Weiss)
+
+* SOLR-3430: Added a new DIH test against a real SQL database.  Fixed problems 
+  revealed by this new test related to  the expanded cache support added to 
+  3.6/SOLR-2382 (James Dyer)
+             
+* SOLR-1958: When using the MailEntityProcessor, import would fail if 
+  fetchMailsSince was not specified. (Max Lynch via James Dyer) 
+
+
 Other Changes
 ----------------------
 
@@ -828,7 +920,13 @@ Other Changes
 * SOLR-3534: The Dismax and eDismax query parsers will fall back on the 'df' parameter
   when 'qf' is absent.  And if neither is present nor the schema default search field
   then an exception will be thrown now. (dsmiley)
-  
+
+* SOLR-3262: The "threads" feature of DIH is removed (deprecated in Solr 3.6) 
+  (James Dyer)
+
+* SOLR-3422: Refactored DIH internal data classes.  All entities in 
+  data-config.xml must have a name (James Dyer)
+ 
 Documentation
 ----------------------
 
@@ -861,6 +959,20 @@ Bug Fixes:
 
 * SOLR-3477: SOLR does not start up when no cores are defined (Tomás Fernández Löbbe via tommaso)
 
+* SOLR-3470: contrib/clustering: custom Carrot2 tokenizer and stemmer factories
+  are respected now (Stanislaw Osinski, Dawid Weiss)
+
+* SOLR-3360: More DIH bug fixes for the deprecated "threads" parameter.  
+  (Mikhail Khludnev, Claudio R, via James Dyer)
+
+* SOLR-3430: Added a new DIH test against a real SQL database.  Fixed problems 
+  revealed by this new test related to the expanded cache support added to 
+  3.6/SOLR-2382 (James Dyer)
+
+* SOLR-3336: SolrEntityProcessor substitutes most variables at query time.
+  (Michael Kroh, Lance Norskog, via Martijn van Groningen)
+
+
 ==================  3.6.0  ==================
 More information about this release, including any errata related to the 
 release notes, upgrade instructions, or other changes may be found online at:
@@ -1003,6 +1115,37 @@ New Features
   exception from being thrown by the default parser if "q" is missing. (yonik)
   SOLR-435: if q is "" then it's also acceptable. (dsmiley, hoss)
 
+* SOLR-2919: Added parametric tailoring options to ICUCollationKeyFilterFactory.
+  These can be used to customize range query/sort behavior, for example to
+  support numeric collation, ignore punctuation/whitespace, ignore accents but
+  not case, control whether upper/lowercase values are sorted first, etc.  (rmuir)
+
+* SOLR-2346: Add a chance to set content encoding explicitly via content type 
+  of stream for extracting request handler.  This is convenient when Tika's 
+  auto detector cannot detect encoding, especially the text file is too short 
+  to detect encoding. (koji)
+
+* SOLR-1499: Added SolrEntityProcessor that imports data from another Solr core
+  or instance based on a specified query.
+  (Lance Norskog, Erik Hatcher, Pulkit Singhal, Ahmet Arslan, Luca Cavanna, 
+  Martijn van Groningen)
+
+* SOLR-3190: Minor improvements to SolrEntityProcessor. Add more consistency 
+  between solr parameters and parameters used in SolrEntityProcessor and 
+  ability to specify a custom HttpClient instance.
+  (Luca Cavanna via Martijn van Groningen)
+
+* SOLR-2382: Added pluggable cache support to DIH so that any Entity can be 
+  made cache-able by adding the "cacheImpl" parameter.  Include 
+  "SortedMapBackedCache" to provide in-memory caching (as previously this was 
+  the only option when using CachedSqlEntityProcessor).  Users can provide 
+  their own implementations of DIHCache for other caching strategies.  
+  Deprecate CachedSqlEntityProcessor in favor of specifying "cacheImpl" with
+  SqlEntityProcessor.  Make SolrWriter implement DIHWriter and allow the 
+  possibility of pluggable Writers (DIH writing to something other than Solr). 
+  (James Dyer, Noble Paul)
+
+
 Optimizations
 ----------------------
 * SOLR-1931: Speedup for LukeRequestHandler and admin/schema browser. New parameter
@@ -1164,6 +1307,35 @@ Bug Fixes
 * SOLR-3316: Distributed grouping failed when rows parameter was set to 0 and 
   sometimes returned a wrong hit count as matches. (Cody Young, Martijn van Groningen)
 
+* SOLR-3107: contrib/langid: When using the LangDetect implementation of 
+  langid, set the random seed to 0, so that the same document is detected as 
+  the same language with the same probability every time.  
+  (Christian Moen via rmuir)
+
+* SOLR-2937: Configuring the number of contextual snippets used for 
+  search results clustering. The hl.snippets parameter is now respected
+  by the clustering plugin, can be overridden by carrot.summarySnippets
+  if needed (Stanislaw Osinski).
+
+* SOLR-2938: Clustering on multiple fields. The carrot.title and 
+  carrot.snippet can now take comma- or space-separated lists of
+  field names to cluster (Stanislaw Osinski).
+
+* SOLR-2939: Clustering of multilingual search results. The document's
+  language field be passed in the carrot.lang parameter, the carrot.lcmap
+  parameter enables mapping of language codes to ISO 639 (Stanislaw Osinski).
+
+* SOLR-2940: Passing values for custom Carrot2 fields to Clustering component. 
+  The custom field mappings are defined using the carrot.custom parameter 
+  (Stanislaw Osinski).
+
+* SOLR-2941: NullPointerException on clustering component initialization 
+  when schema does not have a unique key field (Stanislaw Osinski).
+
+* SOLR-2942: ClassCastException when passing non-textual fields to  
+  clustering component (Stanislaw Osinski).
+
+
 Other Changes
 ----------------------
 * SOLR-2922: Upgrade commons-io and commons-lang to 2.1 and 2.6, respectively. (koji)
@@ -1215,6 +1387,16 @@ Other Changes
   repository).  Also updated dependencies jackson-core-asl and
   jackson-mapper-asl (both v1.5.2 -> v1.7.4).  (Dawid Weiss, Steve Rowe)
 
+* SOLR-3295: netcdf jar is excluded from the binary release (and disabled in 
+  ivy.xml) because it requires java 6. If you want to parse this content with 
+  extracting request handler and are willing to use java 6, just add the jar. 
+  (rmuir)
+
+* SOLR-3142: DIH Imports no longer default optimize to true, instead false. 
+  If you want to force all segments to be merged into one, you can specify 
+  this parameter yourself. NOTE: this can be very expensive operation and 
+  usually does not make sense for delta-imports.  (Robert Muir)
+
 Build
 ----------------------
 * SOLR-2487: Add build target to package war without slf4j jars (janhoy)
@@ -1269,6 +1451,9 @@ New Features
   request param that can be used to delete all but the most recent N backups.
   (James Dyer via hossman)
 
+* SOLR-2839: Add alternative implementation to contrib/langid supporting 53 
+  languages, based on http://code.google.com/p/language-detection/ (rmuir)
+
 Optimizations
 ----------------------
 
@@ -1308,6 +1493,9 @@ Bug Fixes
   a wrong number of collation results in the response.
   (Bastiaan Verhoef, James Dyer via Simon Willnauer)
 
+* SOLR-2875: Fix the incorrect url in DIH example tika-data-config.xml 
+  (Shinichiro Abe via koji)
+
  Other Changes
 ----------------------
 
@@ -1345,6 +1533,9 @@ Bug Fixes
 
 * SOLR-2591: Remove commitLockTimeout option from solrconfig.xml (Luca Cavanna via Martijn van Groningen)
 
+* SOLR-2746: Upgraded UIMA dependencies from *-2.3.1-SNAPSHOT.jar to *-2.3.1.jar.
+
+
 ==================  3.4.0  ==================
 
 Upgrading from Solr 3.3
@@ -1491,12 +1682,39 @@ Bug Fixes
   failed due to sort by function changes introduced in SOLR-1297
   (Mitsu Hadeishi, hossman)
 
+* SOLR-2706: contrib/clustering: The carrot.lexicalResourcesDir parameter 
+  now works with absolute directories (Stanislaw Osinski)
+  
+* SOLR-2692: contrib/clustering: Typo in param name fixed: "carrot.fragzise" 
+  changed to "carrot.fragSize" (Stanislaw Osinski).
+
+* SOLR-2644: When using DIH with threads=2 the default logging is set too high
+  (Bill Bell via shalin)
+
+* SOLR-2492: DIH does not commit if only deletes are processed 
+  (James Dyer via shalin)
+
+* SOLR-2186: DataImportHandler's multi-threaded option throws NPE 
+  (Lance Norskog, Frank Wesemann, shalin)
+
+* SOLR-2655: DIH multi threaded mode does not resolve attributes correctly 
+  (Frank Wesemann, shalin)
+
+* SOLR-2695: DIH: Documents are collected in unsynchronized list in 
+  multi-threaded debug mode (Michael McCandless, shalin)
+
+* SOLR-2668: DIH multithreaded mode does not rollback on errors from 
+  EntityProcessor (Frank Wesemann, shalin)
+
  Other Changes
 ----------------------
 
 * SOLR-2629: Eliminate deprecation warnings in some JSPs.
   (Bernd Fehling, hossman)
 
+* SOLR-2743: Remove commons logging from contrib/extraction. (koji)
+
+
 Build
 ----------------------
 
@@ -1568,6 +1786,13 @@ New Features
 
 * SOLR-2610 -- Add an option to delete index through CoreAdmin UNLOAD action (shalin)
 
+* SOLR-2480: Add ignoreTikaException flag to the extraction request handler so 
+  that users can ignore TikaException but index meta data. 
+  (Shinichiro Abe, koji)
+
+* SOLR-2582: Use uniqueKey for error log in UIMAUpdateRequestProcessor.
+  (Tommaso Teofili via koji)
+
 Optimizations
 ----------------------
 
@@ -1587,6 +1812,15 @@ Bug Fixes
   parameter is added to avoid excessive CPU time in extreme cases (e.g. long
   queries with many misspelled words).  (James Dyer via rmuir)
 
+* SOLR-2579: UIMAUpdateRequestProcessor ignore error fails if text.length() < 100.
+  (Elmer Garduno via koji)
+
+* SOLR-2581: UIMAToSolrMapper wrongly instantiates Type with reflection.
+  (Tommaso Teofili via koji)
+
+* SOLR-2551: Check dataimport.properties for write access (if delta-import is 
+  supported in DIH configuration) before starting an import (C S, shalin)
+
 Other Changes
 ----------------------
 
@@ -1626,6 +1860,10 @@ Upgrading from Solr 3.1
   with update.chain rather than update.processor. The latter still works,
   but has been deprecated.
 
+* <uimaConfig/> just beneath <config> ... </config> is no longer supported.
+  It should move to UIMAUpdateRequestProcessorFactory setting.
+  See contrib/uima/README.txt for more details. (SOLR-2436)
+
 Detailed Change List
 ----------------------
 
@@ -1646,6 +1884,18 @@ New Features
   Explanation objects in it's responses instead of
   Explanation.toString  (hossman)
 
+* SOLR-2448: Search results clustering updates: bisecting k-means
+  clustering algorithm added, loading of Carrot2 stop words from
+  <solr.home>/conf/carrot2 (SOLR-2449), using Solr's stopwords.txt
+  for clustering (SOLR-2450), output of cluster scores (SOLR-2505)
+  (Stanislaw Osinski, Dawid Weiss).
+
+* SOLR-2503: extend UIMAUpdateRequestProcessorFactory mapping function to 
+  map feature value to dynamicField. (koji)
+
+* SOLR-2512: add ignoreErrors flag to UIMAUpdateRequestProcessorFactory so 
+  that users can ignore exceptions in AE. (Tommaso Teofili, koji)
+
 Optimizations
 ----------------------
 
@@ -1732,6 +1982,12 @@ Other Changes
 * SOLR-2528: Remove default="true" from HtmlEncoder in example solrconfig.xml,
   because html encoding confuses non-ascii users. (koji)
 
+* SOLR-2387: add mock annotators for improved testing in contrib/uima,
+  (Tommaso Teofili via rmuir)
+
+* SOLR-2436: move uimaConfig to under the uima's update processor in 
+  solrconfig.xml.  (Tommaso Teofili, koji)
+
 Build
 ----------------------
 
@@ -1989,6 +2245,50 @@ New Features
 
 * SOLR-1057: Add PathHierarchyTokenizerFactory. (ryan, koji)
 
+* SOLR-1804: Re-enabled clustering component on trunk, updated to latest 
+  version of Carrot2.  No more LGPL run-time dependencies.  This release of 
+  C2 also does not have a specific Lucene dependency.  
+  (Stanislaw Osinski, gsingers)
+
+* SOLR-2282: Add distributed search support for search result clustering.
+  (Brad Giaccio, Dawid Weiss, Stanislaw Osinski, rmuir, koji)
+
+* SOLR-2210: Add icu-based tokenizer and filters to contrib/analysis-extras (rmuir)
+
+* SOLR-1336: Add SmartChinese (word segmentation for Simplified Chinese) 
+  tokenizer and filters to contrib/analysis-extras (rmuir)
+
+* SOLR-2211,LUCENE-2763: Added UAX29URLEmailTokenizerFactory, which implements
+  UAX#29, a unicode algorithm with good results for most languages, as well as
+  URL and E-mail tokenization according to the relevant RFCs.
+  (Tom Burton-West via rmuir)
+
+* SOLR-2237: Added StempelPolishStemFilterFactory to contrib/analysis-extras (rmuir)
+
+* SOLR-1525: allow DIH to refer to core properties (noble)
+
+* SOLR-1547: DIH TemplateTransformer copy objects more intelligently when the 
+  template is a single variable (noble)
+
+* SOLR-1627: DIH VariableResolver should be fetched just in time (noble)
+
+* SOLR-1583: DIH Create DataSources that return InputStream (noble)
+
+* SOLR-1358: Integration of Tika and DataImportHandler (Akshay Ukey, noble)
+
+* SOLR-1654: TikaEntityProcessor example added DIHExample 
+  (Akshay Ukey via noble)
+
+* SOLR-1678: Move onError handling to DIH framework (noble)
+
+* SOLR-1352: Multi-threaded implementation of DIH (noble)
+
+* SOLR-1721: Add explicit option to run DataImportHandler in synchronous mode 
+  (Alexey Serba via noble)
+
+* SOLR-1737: Added FieldStreamDataSource (noble)
+
+
 Optimizations
 ----------------------
 
@@ -2010,6 +2310,13 @@ Optimizations
 
 * SOLR-2046: add common functions to scripts-util. (koji)
 
+* SOLR-1684: Switch clustering component to use the 
+  SolrIndexSearcher.doc(int, Set<String>) method b/c it can use the document 
+  cache (gsingers)
+
+* SOLR-2200: Improve the performance of DataImportHandler for large 
+  delta-import updates. (Mark Waddle via rmuir)
+
 Bug Fixes
 ----------------------
 * SOLR-1769: Solr 1.4 Replication - Repeater throwing NullPointerException (Jörgen Rydenius via noble)
@@ -2264,6 +2571,70 @@ Bug Fixes
 * SOLR-2192: StreamingUpdateSolrServer.blockUntilFinished was not
   thread safe and could throw an exception. (yonik)
 
+* SOLR-1692: Fix bug in clustering component relating to carrot.produceSummary 
+  option (gsingers)
+
+* SOLR-1756: The date.format setting for extraction request handler causes 
+  ClassCastException when enabled and the config code that parses this setting 
+  does not properly use the same iterator instance. 
+  (Christoph Brill, Mark Miller)
+
+* SOLR-1638: Fixed NullPointerException during DIH import if uniqueKey is not 
+  specified in schema (Akshay Ukey via shalin)
+
+* SOLR-1639: Fixed misleading error message when dataimport.properties is not 
+  writable (shalin)
+
+* SOLR-1598: DIH: Reader used in PlainTextEntityProcessor is not explicitly 
+  closed (Sascha Szott via noble)
+
+* SOLR-1759: DIH: $skipDoc was not working correctly 
+  (Gian Marco Tagliani via noble)
+
+* SOLR-1762: DIH: DateFormatTransformer does not work correctly with 
+  non-default locale dates (tommy chheng via noble)
+
+* SOLR-1757: DIH multithreading sometimes throws NPE (noble)
+
+* SOLR-1766: DIH with threads enabled doesn't respond to the abort command 
+  (Michael Henson via noble)
+
+* SOLR-1767: dataimporter.functions.escapeSql() does not escape backslash 
+  character (Sean Timm via noble)
+
+* SOLR-1811: formatDate should use the current NOW value always 
+  (Sean Timm via noble)
+
+* SOLR-1794: Dataimport of CLOB fields fails when getCharacterStream() is 
+  defined in a superclass. (Gunnar Gauslaa Bergem via rmuir)
+
+* SOLR-2057: DataImportHandler never calls UpdateRequestProcessor.finish()
+  (Drew Farris via koji)
+
+* SOLR-1973: Empty fields in XML update messages confuse DataImportHandler. 
+  (koji)
+
+* SOLR-2221: Use StrUtils.parseBool() to get values of boolean options in DIH.
+  true/on/yes (for TRUE) and false/off/no (for FALSE) can be used for 
+  sub-options (debug, verbose, synchronous, commit, clean, optimize) for 
+  full/delta-import commands. (koji)
+
+* SOLR-2310: DIH: getTimeElapsedSince() returns incorrect hour value when 
+  the elapse is over 60 hours (tom liu via koji)
+
+* SOLR-2252: DIH: When a child entity in nested entities is rootEntity="true", 
+  delta-import doesn't work. (koji)
+
+* SOLR-2330: solrconfig.xml files in example-DIH are broken. (Matt Parker, koji)
+
+* SOLR-1191: resolve DataImportHandler deltaQuery column against pk when pk
+  has a prefix (e.g. pk="book.id" deltaQuery="select id from ..."). More
+  useful error reporting when no match found (previously failed with a
+  NullPointerException in log and no clear user feedback). (gthb via yonik)
+
+* SOLR-2116: Fix TikaConfig classloader bug in TikaEntityProcessor
+  (Martijn van Groningen via hossman)
+
 Other Changes
 ----------------------
 
@@ -2391,6 +2762,16 @@ Other Changes
 * SOLR-141: Errors and Exceptions are formatted by ResponseWriter.
   (Mike Sokolov, Rich Cariens, Daniel Naber, ryan)
 
+* SOLR-1902: Upgraded to Tika 0.8 and changed deprecated parse call
+
+* SOLR-1813: Add ICU4j to contrib/extraction libs and add tests for Arabic 
+  extraction (Robert Muir via gsingers)
+
+* SOLR-1821: Fix TimeZone-dependent test failure in TestEvaluatorBag.
+  (Chris Male via rmuir)
+
+* SOLR-2367: Reduced noise in test output by ensuring the properties file 
+  can be written. (Gunnlaugur Thor Briem via rmuir)
 
 Build
 ----------------------
@@ -2476,6 +2857,33 @@ error.  See SOLR-1410 for more informati
  * RussianLowerCaseFilterFactory
  * RussianLetterTokenizerFactory
 
+DIH: Evaluator API has been changed in a non back-compatible way. Users who 
+have developed custom Evaluators will need to change their code according to 
+the new API for it to work. See SOLR-996 for details.
+
+DIH: The formatDate evaluator's syntax has been changed. The new syntax is 
+formatDate(<variable>, '<format_string>'). For example, 
+formatDate(x.date, 'yyyy-MM-dd'). In the old syntax, the date string was 
+written without single-quotes. The old syntax has been deprecated and will 
+be removed in 1.5; until then, using the old syntax will log a warning.
+
+DIH: The Context API has been changed in a non back-compatible way. In 
+particular, the Context.currentProcess() method now returns a String 
+describing the type of the current import process instead of an int. 
+Similarly, the public constants in Context viz. FULL_DUMP, DELTA_DUMP and 
+FIND_DELTA are changed to a String type. See SOLR-969 for details.
+
+DIH: The EntityProcessor API has been simplified by moving logic for applying 
+transformers and handling multi-row outputs from Transformers into an 
+EntityProcessorWrapper class. The EntityProcessor#destroy is now called once 
+per parent-row at the end of row (end of data). A new method 
+EntityProcessor#close is added which is called at the end of import.
+
+DIH: In Solr 1.3, if the last_index_time was not available (first import) and 
+a delta-import was requested, a full-import was run instead. This is no longer 
+the case. In Solr 1.4 delta import is run with last_index_time as the epoch 
+date (January 1, 1970, 00:00:00 GMT) if last_index_time is not available.
+
 Versions of Major Components
 ----------------------------
 Apache Lucene 2.9.1  (r832363  on 2.9 branch)
@@ -2761,6 +3169,146 @@ New Features
 84. SOLR-1449: Add <lib> elements to solrconfig.xml to specifying additional
     classpath directories and regular expressions. (hossman via yonik)
 
+85. SOLR-1128: Added metadata output to extraction request handler "extract 
+    only" option.  (gsingers)
+
+86. SOLR-1274: Added text serialization output for extractOnly 
+    (Peter Wolanin, gsingers)  
+
+87. SOLR-768: DIH: Set last_index_time variable in full-import command.
+    (Wojtek Piaseczny, Noble Paul via shalin)
+
+88. SOLR-811: Allow a "deltaImportQuery" attribute in SqlEntityProcessor 
+    which is used for delta imports instead of DataImportHandler manipulating 
+    the SQL itself. (Noble Paul via shalin)
+
+89. SOLR-842:  Better error handling in DataImportHandler with options to 
+    abort, skip and continue imports. (Noble Paul, shalin)
+
+90. SOLR-833: DIH: A DataSource to read data from a field as a reader. This 
+    can be used, for example, to read XMLs residing as CLOBs or BLOBs in 
+    databases. (Noble Paul via shalin)
+
+91. SOLR-887: A DIH Transformer to strip HTML tags. (Ahmed Hammad via shalin)
+
+92. SOLR-886: DataImportHandler should rollback when an import fails or it is 
+    aborted (shalin)
+
+93. SOLR-891: A DIH Transformer to read strings from Clob type. 
+    (Noble Paul via shalin)
+
+94. SOLR-812: Configurable JDBC settings in JdbcDataSource including optimized 
+    defaults for read only mode. (David Smiley, Glen Newton, shalin)
+
+95. SOLR-910: Add a few utility commands to the DIH admin page such as full 
+    import, delta import, status, reload config. (Ahmed Hammad via shalin)
+
+96. SOLR-938: Add event listener API for DIH import start and end.
+    (Kay Kay, Noble Paul via shalin)
+
+97. SOLR-801: DIH: Add support for configurable pre-import and post-import 
+    delete query per root-entity. (Noble Paul via shalin)
+
+98. SOLR-988: Add a new scope for session data stored in Context to store 
+    objects across imports. (Noble Paul via shalin)
+
+99. SOLR-980: A PlainTextEntityProcessor which can read from any 
+    DataSource<Reader> and output a String. 
+    (Nathan Adams, Noble Paul via shalin)
+
+100.SOLR-1003: XPathEntityprocessor must allow slurping all text from a given 
+    xml node and its children. (Noble Paul via shalin)
+
+101.SOLR-1001: Allow variables in various attributes of RegexTransformer, 
+    HTMLStripTransformer and NumberFormatTransformer.
+    (Fergus McMenemie, Noble Paul, shalin)
+
+102.SOLR-989: DIH: Expose running statistics from the Context API.
+    (Noble Paul, shalin)
+
+103.SOLR-996: DIH: Expose Context to Evaluators. (Noble Paul, shalin)
+
+104.SOLR-783: DIH: Enhance delta-imports by maintaining separate 
+    last_index_time for each entity. (Jon Baer, Noble Paul via shalin)
+
+105.SOLR-1033: Current entity's namespace is made available to all DIH 
+    Transformers. This allows one to use an output field of TemplateTransformer
+    in other transformers, among other things.
+    (Fergus McMenemie, Noble Paul via shalin)
+
+106.SOLR-1066: New methods in DIH Context to expose Script details. 
+    ScriptTransformer changed to read scripts through the new API methods.
+    (Noble Paul via shalin)
+
+107.SOLR-1062: A DIH LogTransformer which can log data in a given template 
+    format. (Jon Baer, Noble Paul via shalin)
+
+108.SOLR-1065: A DIH ContentStreamDataSource which can accept HTTP POST data 
+    in a content stream. This can be used to push data to Solr instead of 
+    just pulling it from DB/Files/URLs. (Noble Paul via shalin)
+
+109.SOLR-1061: Improve DIH RegexTransformer to create multiple columns from 
+    regex groups. (Noble Paul via shalin)
+
+110.SOLR-1059: Special DIH flags introduced for deleting documents by query or 
+    id, skipping rows and stopping further transforms. Use $deleteDocById, 
+    $deleteDocByQuery for deleting by id and query respectively.  Use $skipRow 
+    to skip the current row but continue with the document. Use $stopTransform 
+    to stop further transformers. New methods are introduced in Context for 
+    deleting by id and query. (Noble Paul, Fergus McMenemie, shalin)
+
+111.SOLR-1076: JdbcDataSource should resolve DIH variables in all its 
+    configuration parameters. (shalin)
+
+112.SOLR-1055: Make DIH JdbcDataSource easily extensible by making the 
+    createConnectionFactory method protected and return a 
+    Callable<Connection> object. (Noble Paul, shalin)
+
+113.SOLR-1058: DIH: JdbcDataSource can lookup javax.sql.DataSource using JNDI. 
+    Use a jndiName attribute to specify the location of the data source.
+    (Jason Shepherd, Noble Paul via shalin)
+
+114.SOLR-1083: A DIH Evaluator for escaping query characters. 
+    (Noble Paul, shalin)
+
+115.SOLR-934: A MailEntityProcessor to enable indexing mails from 
+    POP/IMAP sources into a solr index. (Preetam Rao, shalin)
+
+116.SOLR-1060: A DIH LineEntityProcessor which can stream lines of text from a 
+    given file to be indexed directly or for processing with transformers and
+    child entities.
+    (Fergus McMenemie, Noble Paul, shalin)
+
+117.SOLR-1127: Add support for DIH field name to be templatized.
+    (Noble Paul, shalin)
+
+118.SOLR-1092: Added a new DIH command named 'import' which does not 
+    automatically clean the index. This is useful and more appropriate when one
+    needs to import only some of the entities.
+    (Noble Paul via shalin)
+              
+119.SOLR-1153: DIH 'deltaImportQuery' is honored on child entities as well 
+    (noble) 
+
+120.SOLR-1230: Enhanced dataimport.jsp to work with all DataImportHandler 
+    request handler configurations, rather than just a hardcoded /dataimport 
+    handler. (ehatcher)
+              
+121.SOLR-1235: disallow period (.) in DIH entity names (noble)
+
+122.SOLR-1234: Multiple DIH does not work because all of them write to 
+    dataimport.properties. Use the handler name as the properties file name 
+    (noble)
+
+123.SOLR-1348: Support binary field type in convertType logic in DIH 
+    JdbcDataSource (shalin)
+
+124.SOLR-1406: DIH: Make FileDataSource and FileListEntityProcessor to be more 
+    extensible (Luke Forehand, shalin)
+
+125.SOLR-1437: DIH: XPathEntityProcessor can deal with xpath syntaxes such as 
+    //tagname , /root//tagname (Fergus McMenemie via noble)
+
 
 Optimizations
 ----------------------
@@ -2819,6 +3367,21 @@ Optimizations
 17. SOLR-1296: Enables setting IndexReader's termInfosIndexDivisor via a new attribute to StandardIndexReaderFactory.  Enables
     setting termIndexInterval to IndexWriter via SolrIndexConfig. (Jason Rutherglen, hossman, gsingers)
 
+18. SOLR-846: DIH: Reduce memory consumption during delta import by removing 
+    keys when used (Ricky Leung, Noble Paul via shalin)
+
+19. SOLR-974: DataImportHandler skips commit if no data has been updated.
+    (Wojtek Piaseczny, shalin)
+
+20. SOLR-1004: DIH: Check for abort more frequently during delta-imports.
+    (Marc Sturlese, shalin)
+
+21. SOLR-1098: DIH DateFormatTransformer can cache the format objects.
+    (Noble Paul via shalin)
+
+22. SOLR-1465: Replaced string concatenations with StringBuilder append 
+    calls in DIH XPathRecordReader. (Mark Miller, shalin)
+
 Bug Fixes
 ----------------------
  1. SOLR-774: Fixed logging level display (Sean Timm via Otis Gospodnetic)
@@ -3036,6 +3599,103 @@ Bug Fixes
     caused an error to be returned, although the deletes were
     still executed.  (asmodean via yonik)
 
+76. SOLR-800: Deep copy collections to avoid ConcurrentModificationException 
+    in XPathEntityprocessor while streaming
+    (Kyle Morrison, Noble Paul via shalin)
+
+77. SOLR-823: Request parameter variables ${dataimporter.request.xxx} are not 
+    resolved in DIH (Mck SembWever, Noble Paul, shalin)
+
+78. SOLR-728: Add synchronization to avoid race condition of multiple DIH 
+    imports working concurrently (Walter Ferrara, shalin)
+
+79. SOLR-742: Add ability to create dynamic fields with custom 
+    DataImportHandler transformers (Wojtek Piaseczny, Noble Paul, shalin)
+
+80. SOLR-832: Rows parameter is not honored in DIH non-debug mode and can 
+    abort a running import in debug mode. (Akshay Ukey, shalin)
+
+81. SOLR-838: The DIH VariableResolver obtained from a DataSource's context 
+    does not have current data. (Noble Paul via shalin)
+
+82. SOLR-864: DataImportHandler does not catch and log Errors (shalin)
+
+83. SOLR-873: Fix case-sensitive field names and columns (Jon Baer, shalin)
+
+84. SOLR-893: Unable to delete documents via SQL and deletedPkQuery with 
+    deltaimport (Dan Rosher via shalin)
+
+85. SOLR-888: DIH DateFormatTransformer cannot convert non-string type
+    (Amit Nithian via shalin)
+
+86. SOLR-841: DataImportHandler should throw exception if a field does not 
+    have column attribute (Michael Henson, shalin)
+
+87. SOLR-884: CachedSqlEntityProcessor should check if the cache key is 
+    present in the query results (Noble Paul via shalin)
+
+88. SOLR-985: Fix thread-safety issue with DIH TemplateString for concurrent 
+    imports with multiple cores. (Ryuuichi Kumai via shalin)
+
+89. SOLR-999: DIH XPathRecordReader fails on XMLs with nodes mixed with 
+    CDATA content. (Fergus McMenemie, Noble Paul via shalin)
+
+90. SOLR-1000: DIH FileListEntityProcessor should not apply fileName filter to 
+    directory names. (Fergus McMenemie via shalin)
+
+91. SOLR-1009: Repeated column names result in duplicate values. 
+    (Fergus McMenemie, Noble Paul via shalin)
+
+92. SOLR-1017: Fix DIH thread-safety issue with last_index_time for concurrent 
+    imports in multiple cores due to unsafe usage of SimpleDateFormat by 
+    multiple threads. (Ryuuichi Kumai via shalin)
+
+93. SOLR-1024: Calling abort on DataImportHandler import commits data instead 
+    of calling rollback. (shalin)
+
+94. SOLR-1037: DIH should not add null values in a row returned by 
+    EntityProcessor to documents. (shalin)
+
+95. SOLR-1040: DIH XPathEntityProcessor fails with an xpath like 
+    /feed/entry/link[@type='text/html']/@href (Noble Paul via shalin)
+
+96. SOLR-1042: Fix memory leak in DIH by making TemplateString non-static 
+    member in VariableResolverImpl (Ryuuichi Kumai via shalin)
+
+97. SOLR-1053: IndexOutOfBoundsException in DIH SolrWriter.getResourceAsString 
+    when size of data-config.xml is a multiple of 1024 bytes.
+    (Herb Jiang via shalin)
+
+98. SOLR-1077: IndexOutOfBoundsException with useSolrAddSchema in DIH 
+    XPathEntityProcessor. (Sam Keen, Noble Paul via shalin)
+
+99. SOLR-1080: DIH RegexTransformer should not replace if regex is not matched.
+    (Noble Paul, Fergus McMenemie via shalin)
+
+100.SOLR-1090: DataImportHandler should load the data-config.xml using UTF-8 
+    encoding. (Rui Pereira, shalin)
+
+101.SOLR-1146: ConcurrentModificationException in DataImporter.getStatusMessages
+    (Walter Ferrara, Noble Paul via shalin)
+
+102.SOLR-1229: Fixes for DIH deletedPkQuery, particularly when using 
+    transformed Solr unique id's
+    (Lance Norskog, Noble Paul via ehatcher)
+              
+103.SOLR-1286: Fix the IH commit parameter always defaulting to "true" even 
+    if "false" is explicitly passed in. (Jay Hill, Noble Paul via ehatcher)
+            
+104.SOLR-1323: Reset XPathEntityProcessor's $hasMore/$nextUrl when fetching 
+    next URL (noble, ehatcher)
+
+105.SOLR-1450: DIH: Jdbc connection properties such as batchSize are not 
+    applied if the driver jar is placed in solr_home/lib.
+    (Steve Sun via shalin)
+
+106.SOLR-1474: DIH Delta-import should run even if last_index_time is not set.
+    (shalin)
+
+
 Other Changes
 ----------------------
  1. Upgraded to Lucene 2.4.0 (yonik)
@@ -3175,6 +3835,63 @@ Other Changes
 
 50. SOLR-1357 SolrInputDocument cannot process dynamic fields (Lars Grote via noble)
 
+51. SOLR-1075: Upgrade to Tika 0.3.  See http://www.apache.org/dist/lucene/tika/CHANGES-0.3.txt (gsingers)
+
+52. SOLR-1310: Upgrade to Tika 0.4. Note there are some differences in 
+    detecting Languages now in extracting request handler.
+    See http://www.lucidimagination.com/search/document/d6f1899a85b2a45c/vote_apache_tika_0_4_release_candidate_2#d6f1899a85b2a45c
+    for discussion on language detection.
+    See http://www.apache.org/dist/lucene/tika/CHANGES-0.4.txt. (gsingers)
+
+53. SOLR-782: DIH: Refactored SolrWriter to make it a concrete class and 
+    removed wrappers over SolrInputDocument.  Refactored to load Evaluators 
+    lazily. Removed multiple document nodes in the configuration xml. Removed 
+    support for 'default' variables, they are automatically available as 
+    request parameters. (Noble Paul via shalin)
+
+54. SOLR-964: DIH: XPathEntityProcessor now ignores DTD validations
+    (Fergus McMenemie, Noble Paul via shalin)
+
+55. SOLR-1029: DIH: Standardize Evaluator parameter parsing and added helper 
+    functions for parsing all evaluator parameters in a standard way.
+    (Noble Paul, shalin)
+
+56. SOLR-1081: Change DIH EventListener to be an interface so that components 
+    such as an EntityProcessor or a Transformer can act as an event listener.
+    (Noble Paul, shalin)
+
+57. SOLR-1027: DIH: Alias the 'dataimporter' namespace to a shorter name 'dih'.
+    (Noble Paul via shalin)
+
+58. SOLR-1084: Better error reporting when DIH entity name is a reserved word 
+    and data-config.xml root node is not <dataConfig>.
+    (Noble Paul via shalin)
+
+59. SOLR-1087: Deprecate 'where' attribute in CachedSqlEntityProcessor in 
+    favor of cacheKey and cacheLookup. (Noble Paul via shalin)
+
+60. SOLR-969: Change the FULL_DUMP, DELTA_DUMP, FIND_DELTA constants in DIH 
+    Context to String.  Change Context.currentProcess() to return a string 
+    instead of an integer.  (Kay Kay, Noble Paul, shalin)
+
+61. SOLR-1120: Simplified DIH EntityProcessor API by moving logic for applying 
+    transformers and handling multi-row outputs from Transformers into an 
+    EntityProcessorWrapper class. The behavior of the method 
+    EntityProcessor#destroy has been modified to be called once per parent row 
+    at the end of the row. A new method EntityProcessor#close is added which is 
+    called at the end of import. A new method 
+    Context#getResolvedEntityAttribute is added which returns the resolved 
+    value of an entity's attribute. Introduced a DocWrapper which takes care 
+    of maintaining document level session variables.
+    (Noble Paul, shalin)
+
+62. SOLR-1265: Add DIH variable resolving for URLDataSource properties like 
+    baseUrl.  (Chris Eldredge via ehatcher)
+
+63. SOLR-1269: Better error messages from DIH JdbcDataSource when JDBC Driver 
+    name or SQL is incorrect. (ehatcher, shalin)
+
+
 Build
 ----------------------
  1. SOLR-776: Added in ability to sign artifacts via Ant for releases (gsingers)
@@ -3200,6 +3917,10 @@ Documentation
 
  3. SOLR-1409: Added Solr Powered By Logos    
 
+ 4. SOLR-1369: Add HSQLDB Jar to example-DIH, unzip database and update 
+    instructions.
+
+
 ================== Release 1.3.0 ==================
 
 Upgrading from Solr 1.2
@@ -3545,7 +4266,10 @@ New Features
 71. SOLR-1129 : Support binding dynamic fields to beans in SolrJ (Avlesh Singh , noble)
 
 72. SOLR-920 : Cache and reuse IndexSchema . A new attribute added in solr.xml called 'shareSchema' (noble)
-    
+
+73. SOLR-700: DIH: Allow configurable locales through a locale attribute in 
+    fields for NumberFormatTransformer. (Stefan Oestreicher, shalin)
+
 Changes in runtime behavior
  1. SOLR-559: use Lucene updateDocument, deleteDocuments methods.  This
     removes the maxBufferedDeletes parameter added by SOLR-310 as Lucene
@@ -3760,6 +4484,18 @@ Bug Fixes
 
 50. SOLR-749: Allow QParser and ValueSourceParsers to be extended with same name (hossman, gsingers)
 
+51. SOLR-704: DIH NumberFormatTransformer can silently ignore part of the 
+    string while parsing. Now it tries to use the complete string for parsing. 
+    Failure to do so will result in an exception.
+    (Stefan Oestreicher via shalin)
+
+52. SOLR-729: DIH Context.getDataSource(String) gives current entity's 
+    DataSource instance regardless of argument. (Noble Paul, shalin)
+
+53. SOLR-726: DIH: Jdbc Drivers and DataSources fail to load if placed in 
+    multicore sharedLib or core's lib directory.
+    (Walter Ferrara, Noble Paul, shalin)
+
 Other Changes
  1. SOLR-135: Moved common classes to org.apache.solr.common and altered the
     build scripts to make two jars: apache-solr-1.3.jar and 

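A recurring theme in the DIH fixes above (SOLR-985, SOLR-1017, SOLR-1042) is a
mutable helper such as SimpleDateFormat or TemplateString being shared across
concurrent imports. The sketch below is illustrative only, not the DIH patch
itself: it shows the per-thread-instance pattern that avoids this class of bug,
using a hypothetical PerThreadDateFormat helper and an assumed
"yyyy-MM-dd HH:mm:ss" pattern.

    import java.text.ParseException;
    import java.text.SimpleDateFormat;
    import java.util.Date;

    // Illustrative only: SimpleDateFormat keeps mutable parse state, so one
    // instance shared by several import threads can silently corrupt results.
    // Giving each thread its own instance via ThreadLocal is one common fix.
    public class PerThreadDateFormat {

      private static final ThreadLocal<SimpleDateFormat> FORMAT =
          new ThreadLocal<SimpleDateFormat>() {
            @Override
            protected SimpleDateFormat initialValue() {
              // One private formatter per thread; never shared or synchronized.
              return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            }
          };

      public static Date parseLastIndexTime(String value) throws ParseException {
        return FORMAT.get().parse(value);
      }

      public static String formatLastIndexTime(Date value) {
        return FORMAT.get().format(value);
      }

      public static void main(String[] args) throws ParseException {
        String stamp = formatLastIndexTime(new Date());
        System.out.println(parseLastIndexTime(stamp));
      }
    }

Synchronizing on a shared formatter, or creating a new one per call, works just
as well; the essential point is that a single instance is never used from two
threads at once.
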
Modified: lucene/dev/branches/lucene3312/solr/build.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3312/solr/build.xml?rev=1371142&r1=1371141&r2=1371142&view=diff
==============================================================================
--- lucene/dev/branches/lucene3312/solr/build.xml (original)
+++ lucene/dev/branches/lucene3312/solr/build.xml Thu Aug  9 10:20:53 2012
@@ -402,11 +402,11 @@
                   prefix="${fullnamever}"
                   includes="LICENSE.txt NOTICE.txt CHANGES.txt README.txt example/**
                              client/README.txt client/ruby/solr-ruby/** contrib/**/lib/**
-                             contrib/**/README.txt contrib/**/CHANGES.txt"
+                             contrib/**/README.txt licenses/**"
                   excludes="lib/README.committers.txt **/data/ **/logs/* 
                             **/classes/ **/*.sh **/ivy.xml **/build.xml
                             **/bin/ **/*.iml **/*.ipr **/*.iws **/pom.xml 
-                            **/*pom.xml.template **/*.sha1" />
+                            **/*pom.xml.template" />
       <tarfileset dir="${dest}/contrib-lucene-libs-to-package"
                   prefix="${fullnamever}"
                   includes="**" />
@@ -763,4 +763,8 @@
      </delete>
   </target>
 
+  <target name="jar-checksums" depends="clean-jars,resolve">
+    <jar-checksum-macro srcdir="${common-solr.dir}" dstdir="${common-solr.dir}/licenses"/>
+  </target>
+
 </project>

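The build.xml changes above package the new licenses/ directory (and stop
excluding *.sha1 files from the source distribution), and the added
jar-checksums target regenerates the checksum files there through
jar-checksum-macro. The macro's output format is not part of this diff; as a
rough sketch, a <jar>.sha1 file conventionally holds the hex SHA-1 digest of
the jar, which plain JDK code can compute like this (the jar path is a
placeholder):

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    // Sketch: compute the hex SHA-1 digest that a "<jar>.sha1" checksum file
    // conventionally contains. The path passed in main() is a placeholder.
    public class JarSha1 {

      static String sha1Hex(String path) throws IOException, NoSuchAlgorithmException {
        MessageDigest digest = MessageDigest.getInstance("SHA-1");
        InputStream in = new FileInputStream(path);
        try {
          byte[] buffer = new byte[8192];
          int read;
          while ((read = in.read(buffer)) != -1) {
            digest.update(buffer, 0, read);
          }
        } finally {
          in.close();
        }
        StringBuilder hex = new StringBuilder();
        for (byte b : digest.digest()) {
          hex.append(String.format("%02x", b));
        }
        return hex.toString();
      }

      public static void main(String[] args) throws Exception {
        System.out.println(sha1Hex("licenses/some-dependency.jar"));
      }
    }
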
Modified: lucene/dev/branches/lucene3312/solr/cloud-dev/example1.sh
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3312/solr/cloud-dev/example1.sh?rev=1371142&r1=1371141&r2=1371142&view=diff
==============================================================================
--- lucene/dev/branches/lucene3312/solr/cloud-dev/example1.sh (original)
+++ lucene/dev/branches/lucene3312/solr/cloud-dev/example1.sh Thu Aug  9 10:20:53 2012
@@ -7,6 +7,7 @@ rm -r -f example2
 rm -r -f dist
 rm -r -f build
 rm -r -f example/solr/zoo_data
+rm -r -f example/solr/collection1/data
 rm -f example/example.log
 
 ant example dist

Modified: lucene/dev/branches/lucene3312/solr/cloud-dev/example2.sh
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3312/solr/cloud-dev/example2.sh?rev=1371142&r1=1371141&r2=1371142&view=diff
==============================================================================
--- lucene/dev/branches/lucene3312/solr/cloud-dev/example2.sh (original)
+++ lucene/dev/branches/lucene3312/solr/cloud-dev/example2.sh Thu Aug  9 10:20:53 2012
@@ -9,6 +9,7 @@ rm -r -f example4
 rm -r -f dist
 rm -r -f build
 rm -r -f example/solr/zoo_data
+rm -r -f example/solr/collection1/data
 rm -f example/example.log
 
 ant example dist

Modified: lucene/dev/branches/lucene3312/solr/cloud-dev/example3.sh
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3312/solr/cloud-dev/example3.sh?rev=1371142&r1=1371141&r2=1371142&view=diff
==============================================================================
--- lucene/dev/branches/lucene3312/solr/cloud-dev/example3.sh (original)
+++ lucene/dev/branches/lucene3312/solr/cloud-dev/example3.sh Thu Aug  9 10:20:53 2012
@@ -9,6 +9,7 @@ rm -r -f example4
 rm -r -f dist
 rm -r -f build
 rm -r -f example/solr/zoo_data
+rm -r -f example/solr/collection1/data
 rm -f example/example.log
 
 ant example dist

Modified: lucene/dev/branches/lucene3312/solr/cloud-dev/solrcloud-extzk-start.sh
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3312/solr/cloud-dev/solrcloud-extzk-start.sh?rev=1371142&r1=1371141&r2=1371142&view=diff
==============================================================================
--- lucene/dev/branches/lucene3312/solr/cloud-dev/solrcloud-extzk-start.sh (original)
+++ lucene/dev/branches/lucene3312/solr/cloud-dev/solrcloud-extzk-start.sh Thu Aug  9 10:20:53 2012
@@ -13,7 +13,7 @@ rm -r -f example6
 rm -r -f dist
 rm -r -f build
 rm -r -f example/solr/zoo_data
-rm -r -f example/solr/data
+rm -r -f example/solr/collection1/data
 rm -f example/example.log
 
 ant example dist

Modified: lucene/dev/branches/lucene3312/solr/cloud-dev/solrcloud-multi-start.sh
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3312/solr/cloud-dev/solrcloud-multi-start.sh?rev=1371142&r1=1371141&r2=1371142&view=diff
==============================================================================
--- lucene/dev/branches/lucene3312/solr/cloud-dev/solrcloud-multi-start.sh (original)
+++ lucene/dev/branches/lucene3312/solr/cloud-dev/solrcloud-multi-start.sh Thu Aug  9 10:20:53 2012
@@ -13,7 +13,7 @@ rm -r -f example6
 rm -r -f dist
 rm -r -f build
 rm -r -f example/solr/zoo_data
-rm -r -f example/solr/data
+rm -r -f example/solr/collection1/data
 rm -f example/example.log
 
 ant example dist

Modified: lucene/dev/branches/lucene3312/solr/cloud-dev/solrcloud-start-existing.sh
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3312/solr/cloud-dev/solrcloud-start-existing.sh?rev=1371142&r1=1371141&r2=1371142&view=diff
==============================================================================
--- lucene/dev/branches/lucene3312/solr/cloud-dev/solrcloud-start-existing.sh (original)
+++ lucene/dev/branches/lucene3312/solr/cloud-dev/solrcloud-start-existing.sh Thu Aug  9 10:20:53 2012
@@ -2,9 +2,6 @@
 
 cd ..
 
-rm -r -f dist
-rm -r -f build
-
 cd example
 java -DzkRun -DSTOP.PORT=7983 -DSTOP.KEY=key -jar start.jar 1>example.log 2>&1 &
 

Modified: lucene/dev/branches/lucene3312/solr/cloud-dev/solrcloud-start.sh
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3312/solr/cloud-dev/solrcloud-start.sh?rev=1371142&r1=1371141&r2=1371142&view=diff
==============================================================================
--- lucene/dev/branches/lucene3312/solr/cloud-dev/solrcloud-start.sh (original)
+++ lucene/dev/branches/lucene3312/solr/cloud-dev/solrcloud-start.sh Thu Aug  9 10:20:53 2012
@@ -11,7 +11,7 @@ rm -r -f example6
 rm -r -f dist
 rm -r -f build
 rm -r -f example/solr/zoo_data
-rm -r -f example/solr/data
+rm -r -f example/solr/collection1/data
 rm -f example/example.log
 
 ant example dist

Modified: lucene/dev/branches/lucene3312/solr/contrib/dataimporthandler/README.txt
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3312/solr/contrib/dataimporthandler/README.txt?rev=1371142&r1=1371141&r2=1371142&view=diff
==============================================================================
--- lucene/dev/branches/lucene3312/solr/contrib/dataimporthandler/README.txt (original)
+++ lucene/dev/branches/lucene3312/solr/contrib/dataimporthandler/README.txt Thu Aug  9 10:20:53 2012
@@ -1,3 +1,12 @@
+                    Apache Solr - DataImportHandler
+
+Introduction
+------------
+DataImportHandler is a data import tool for Solr which makes importing data from databases, XML files, and
+HTTP data sources quick and easy.
+
+Important Note
+--------------
 Although Solr strives to be agnostic of the Locale where the server is
 running, some code paths in DataImportHandler are known to depend on the
 System default Locale, Timezone, or Charset.  It is recommended that when

Modified: lucene/dev/branches/lucene3312/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ContextImpl.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3312/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ContextImpl.java?rev=1371142&r1=1371141&r2=1371142&view=diff
==============================================================================
--- lucene/dev/branches/lucene3312/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ContextImpl.java (original)
+++ lucene/dev/branches/lucene3312/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ContextImpl.java Thu Aug  9 10:20:53 2012
@@ -152,7 +152,7 @@ public class ContextImpl extends Context
       }
     } else if (SCOPE_SOLR_CORE.equals(scope)){
       if(dataImporter != null) {
-        dataImporter.getCoreScopeSession().put(name, val);
+        dataImporter.putToCoreScopeSession(name, val);
       }
     }
   }
@@ -171,7 +171,7 @@ public class ContextImpl extends Context
       DocBuilder.DocWrapper doc = getDocument();      
       return doc == null ? null: doc.getSessionAttribute(name);
     } else if (SCOPE_SOLR_CORE.equals(scope)){
-       return dataImporter == null ? null : dataImporter.getCoreScopeSession().get(name);
+       return dataImporter == null ? null : dataImporter.getFromCoreScopeSession(name);
     }
     return null;
   }

Modified: lucene/dev/branches/lucene3312/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3312/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java?rev=1371142&r1=1371141&r2=1371142&view=diff
==============================================================================
--- lucene/dev/branches/lucene3312/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java (original)
+++ lucene/dev/branches/lucene3312/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java Thu Aug  9 10:20:53 2012
@@ -71,14 +71,10 @@ public class DataImportHandler extends R
 
   private DataImporter importer;
 
-  private Map<String, Properties> dataSources = new HashMap<String, Properties>();
-
   private boolean debugEnabled = true;
 
   private String myName = "dataimport";
 
-  private Map<String , Object> coreScopeSession = new HashMap<String, Object>();
-
   @Override
   @SuppressWarnings("unchecked")
   public void init(NamedList args) {
@@ -102,21 +98,10 @@ public class DataImportHandler extends R
         }
       }
       debugEnabled = StrUtils.parseBool((String)initArgs.get(ENABLE_DEBUG), true);
-      NamedList defaults = (NamedList) initArgs.get("defaults");
-      if (defaults != null) {
-        String configLoc = (String) defaults.get("config");
-        if (configLoc != null && configLoc.length() != 0) {
-          processConfiguration(defaults);
-          final InputSource is = new InputSource(core.getResourceLoader().openResource(configLoc));
-          is.setSystemId(SystemIdResolver.createSystemIdFromResourceName(configLoc));
-          importer = new DataImporter(is, core,
-                  dataSources, coreScopeSession, myName);
-        }
-      }
+      importer = new DataImporter(core, myName);         
     } catch (Throwable e) {
       LOG.error( DataImporter.MSG.LOAD_EXP, e);
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
-              DataImporter.MSG.INVALID_CONFIG, e);
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, DataImporter.MSG.LOAD_EXP, e);
     }
   }
 
@@ -136,48 +121,35 @@ public class DataImportHandler extends R
       }
     }
     SolrParams params = req.getParams();
+    NamedList defaultParams = (NamedList) initArgs.get("defaults");
     RequestInfo requestParams = new RequestInfo(getParamsMap(params), contentStream);
     String command = requestParams.getCommand();
-   
     
-    if (DataImporter.SHOW_CONF_CMD.equals(command)) {
-      // Modify incoming request params to add wt=raw
-      ModifiableSolrParams rawParams = new ModifiableSolrParams(req.getParams());
-      rawParams.set(CommonParams.WT, "raw");
-      req.setParams(rawParams);
-      String dataConfigFile = defaults.get("config");
-      ContentStreamBase content = new ContentStreamBase.StringStream(SolrWriter
-              .getResourceAsString(req.getCore().getResourceLoader().openResource(
-              dataConfigFile)));
-      rsp.add(RawResponseWriter.CONTENT, content);
+    if (DataImporter.SHOW_CONF_CMD.equals(command)) {    
+      String dataConfigFile = params.get("config");
+      String dataConfig = params.get("dataConfig");
+      if(dataConfigFile != null) {
+        dataConfig = SolrWriter.getResourceAsString(req.getCore().getResourceLoader().openResource(dataConfigFile));
+      }
+      if(dataConfig==null)  {
+        rsp.add("status", DataImporter.MSG.NO_CONFIG_FOUND);
+      } else {
+        // Modify incoming request params to add wt=raw
+        ModifiableSolrParams rawParams = new ModifiableSolrParams(req.getParams());
+        rawParams.set(CommonParams.WT, "raw");
+        req.setParams(rawParams);
+        ContentStreamBase content = new ContentStreamBase.StringStream(dataConfig);
+        rsp.add(RawResponseWriter.CONTENT, content);
+      }
       return;
     }
 
     rsp.add("initArgs", initArgs);
     String message = "";
 
-    if (command != null)
+    if (command != null) {
       rsp.add("command", command);
-
-    if (requestParams.isDebug() && (importer == null || !importer.isBusy())) {
-      // Reload the data-config.xml
-      importer = null;
-      if (requestParams.getDataConfig() != null) {
-        try {
-          processConfiguration((NamedList) initArgs.get("defaults"));
-          importer = new DataImporter(new InputSource(new StringReader(requestParams.getDataConfig())), req.getCore()
-                  , dataSources, coreScopeSession, myName);
-        } catch (RuntimeException e) {
-          rsp.add("exception", DebugLogger.getStacktraceString(e));
-          importer = null;
-          return;
-        }
-      } else {
-        inform(req.getCore());
-      }
-      message = DataImporter.MSG.CONFIG_RELOADED;
     }
-
     // If importer is still null
     if (importer == null) {
       rsp.add("status", DataImporter.MSG.NO_INIT);
@@ -192,7 +164,7 @@ public class DataImportHandler extends R
       if (DataImporter.FULL_IMPORT_CMD.equals(command)
               || DataImporter.DELTA_IMPORT_CMD.equals(command) ||
               IMPORT_CMD.equals(command)) {
-
+        importer.maybeReloadConfiguration(requestParams, defaultParams);
         UpdateRequestProcessorChain processorChain =
                 req.getCore().getUpdateProcessingChain(params.get(UpdateParams.UPDATE_CHAIN));
         UpdateRequestProcessor processor = processorChain.createProcessor(req, rsp);
@@ -219,10 +191,12 @@ public class DataImportHandler extends R
             importer.runCmd(requestParams, sw);
           }
         }
-      } else if (DataImporter.RELOAD_CONF_CMD.equals(command)) {
-        importer = null;
-        inform(req.getCore());
-        message = DataImporter.MSG.CONFIG_RELOADED;
+      } else if (DataImporter.RELOAD_CONF_CMD.equals(command)) { 
+        if(importer.maybeReloadConfiguration(requestParams, defaultParams)) {
+          message = DataImporter.MSG.CONFIG_RELOADED;
+        } else {
+          message = DataImporter.MSG.CONFIG_NOT_RELOADED;
+        }
       }
     }
     rsp.add("status", importer.isBusy() ? "busy" : "idle");
@@ -248,36 +222,6 @@ public class DataImportHandler extends R
     return result;
   }
 
-  @SuppressWarnings("unchecked")
-  private void processConfiguration(NamedList defaults) {
-    if (defaults == null) {
-      LOG.info("No configuration specified in solrconfig.xml for DataImportHandler");
-      return;
-    }
-
-    LOG.info("Processing configuration from solrconfig.xml: " + defaults);
-
-    dataSources = new HashMap<String, Properties>();
-
-    int position = 0;
-
-    while (position < defaults.size()) {
-      if (defaults.getName(position) == null)
-        break;
-
-      String name = defaults.getName(position);
-      if (name.equals("datasource")) {
-        NamedList dsConfig = (NamedList) defaults.getVal(position);
-        Properties props = new Properties();
-        for (int i = 0; i < dsConfig.size(); i++)
-          props.put(dsConfig.getName(i), dsConfig.getVal(i));
-        LOG.info("Adding properties to datasource: " + props);
-        dataSources.put((String) dsConfig.get("name"), props);
-      }
-      position++;
-    }
-  }
-
   private SolrWriter getSolrWriter(final UpdateRequestProcessor processor,
                                    final SolrResourceLoader loader, final RequestInfo requestParams, SolrQueryRequest req) {
 

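After the rewrite above, the show-config command no longer depends on the
handler's "defaults" section alone: it reads a per-request config (a file name
resolved through the core's resource loader) or dataConfig (inline XML)
parameter, and reports NO_CONFIG_FOUND if neither is available. A minimal
client-side sketch, assuming the stock Solr example at
http://localhost:8983/solr/ with the handler registered at /dataimport (adjust
both for your deployment):

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    // Sketch: fetch the active DIH configuration via the reworked show-config
    // command. Host, port and handler path are assumptions, not part of the patch.
    public class ShowDihConfig {
      public static void main(String[] args) throws Exception {
        URL url = new URL("http://localhost:8983/solr/dataimport"
            + "?command=show-config&config=data-config.xml");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        BufferedReader reader =
            new BufferedReader(new InputStreamReader(conn.getInputStream(), "UTF-8"));
        try {
          String line;
          while ((line = reader.readLine()) != null) {
            System.out.println(line); // raw data-config.xml, since wt=raw is forced
          }
        } finally {
          reader.close();
        }
      }
    }

Because the handler sets wt=raw for this command, the response body is the
configuration text itself rather than a wrapped Solr response.
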
Modified: lucene/dev/branches/lucene3312/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3312/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java?rev=1371142&r1=1371141&r2=1371142&view=diff
==============================================================================
--- lucene/dev/branches/lucene3312/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java (original)
+++ lucene/dev/branches/lucene3312/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java Thu Aug  9 10:20:53 2012
@@ -22,6 +22,8 @@ import org.apache.solr.core.SolrCore;
 import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.schema.SchemaField;
 import org.apache.solr.util.SystemIdResolver;
+import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.XMLErrorLogger;
 import org.apache.solr.handler.dataimport.config.ConfigNameConstants;
 import org.apache.solr.handler.dataimport.config.ConfigParseUtil;
@@ -41,9 +43,12 @@ import org.apache.commons.io.IOUtils;
 
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
+
+import java.io.IOException;
 import java.io.StringReader;
 import java.text.SimpleDateFormat;
 import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.locks.ReentrantLock;
 
@@ -67,14 +72,14 @@ public class DataImporter {
   private DIHConfiguration config;
   private Date indexStartTime;
   private Properties store = new Properties();
-  private Map<String, Properties> dataSourceProps = new HashMap<String, Properties>();
+  private Map<String, Map<String,String>> requestLevelDataSourceProps = new HashMap<String, Map<String,String>>();
   private IndexSchema schema;
   public DocBuilder docBuilder;
   public DocBuilder.Statistics cumulativeStatistics = new DocBuilder.Statistics();
   private SolrCore core;  
+  private Map<String, Object> coreScopeSession = new ConcurrentHashMap<String,Object>();
   private DIHPropertiesWriter propWriter;
   private ReentrantLock importLock = new ReentrantLock();
-  private final Map<String , Object> coreScopeSession;
   private boolean isDeltaImportSupported = false;  
   private final String handlerName;  
   private Map<String, SchemaField> lowerNameVsSchemaField = new HashMap<String, SchemaField>();
@@ -83,12 +88,19 @@ public class DataImporter {
    * Only for testing purposes
    */
   DataImporter() {
-    coreScopeSession = new HashMap<String, Object>();
     createPropertyWriter();
     propWriter.init(this);
     this.handlerName = "dataimport" ;
   }
-
+  
+  DataImporter(SolrCore core, String handlerName) {
+    this.handlerName = handlerName;
+    this.core = core;
+    this.schema = core.getSchema();
+    loadSchemaFieldMap();
+    createPropertyWriter();    
+  }
+  
   private void createPropertyWriter() {
     if (this.core == null
         || !this.core.getCoreDescriptor().getCoreContainer().isZooKeeperAware()) {
@@ -99,27 +111,58 @@ public class DataImporter {
     propWriter.init(this);
   }
 
-  DataImporter(InputSource dataConfig, SolrCore core, Map<String, Properties> ds, Map<String, Object> session, String handlerName) {
-    this.handlerName = handlerName;
-    if (dataConfig == null) {
-      throw new DataImportHandlerException(SEVERE, "Configuration not found");
-    }
-    this.core = core;
-    this.schema = core.getSchema();
-    loadSchemaFieldMap();
-    createPropertyWriter();
-    
-    dataSourceProps = ds;
-    if (session == null)
-      session = new HashMap<String, Object>();
-    coreScopeSession = session;
-    loadDataConfig(dataConfig);
-   
-    for (Entity e : config.getEntities()) {
-      if (e.getAllAttributes().containsKey(SqlEntityProcessor.DELTA_QUERY)) {
-        isDeltaImportSupported = true;
-        break;
+  
+  boolean maybeReloadConfiguration(RequestInfo params,
+      NamedList<?> defaultParams) throws IOException {
+  if (importLock.tryLock()) {
+      boolean success = false;
+      try {        
+        String dataConfigText = params.getDataConfig();
+        String dataconfigFile = (String) params.getConfigFile();        
+        InputSource is = null;
+        if(dataConfigText!=null && dataConfigText.length()>0) {
+          is = new InputSource(new StringReader(dataConfigText));
+        } else if(dataconfigFile!=null) {
+          is = new InputSource(core.getResourceLoader().openResource(dataconfigFile));
+          is.setSystemId(SystemIdResolver.createSystemIdFromResourceName(dataconfigFile));
+          LOG.info("Loading DIH Configuration: " + dataconfigFile);
+        }
+        if(is!=null) {          
+          loadDataConfig(is);
+          success = true;
+        }      
+        
+        Map<String,Map<String,String>> dsProps = new HashMap<String,Map<String,String>>();
+        if(defaultParams!=null) {
+          int position = 0;
+          while (position < defaultParams.size()) {
+            if (defaultParams.getName(position) == null) {
+              break;
+            }
+            String name = defaultParams.getName(position);            
+            if (name.equals("datasource")) {
+              success = true;
+              NamedList dsConfig = (NamedList) defaultParams.getVal(position);
+              LOG.info("Getting configuration for Global Datasource...");              
+              Map<String,String> props = new HashMap<String,String>();
+              for (int i = 0; i < dsConfig.size(); i++) {
+                props.put(dsConfig.getName(i), dsConfig.getVal(i).toString());
+              }
+              LOG.info("Adding properties to datasource: " + props);
+              dsProps.put((String) dsConfig.get("name"), props);
+            }
+            position++;
+          }
+        }
+        requestLevelDataSourceProps = Collections.unmodifiableMap(dsProps);
+      } catch(IOException ioe) {
+        throw ioe;
+      } finally {
+        importLock.unlock();
       }
+      return success;
+    } else {
+      return false;
     }
   }
   
@@ -188,7 +231,13 @@ public class DataImporter {
       LOG.info("Data Configuration loaded successfully");
     } catch (Exception e) {
       throw new DataImportHandlerException(SEVERE,
-              "Exception occurred while initializing context", e);
+              "Data Config problem: " + e.getMessage(), e);
+    }
+    for (Entity e : config.getEntities()) {
+      if (e.getAllAttributes().containsKey(SqlEntityProcessor.DELTA_QUERY)) {
+        isDeltaImportSupported = true;
+        break;
+      }
     }
   }
   
@@ -196,7 +245,7 @@ public class DataImporter {
     DIHConfiguration config;
     List<Map<String, String >> functions = new ArrayList<Map<String ,String>>();
     Script script = null;
-    Map<String, Properties> dataSources = new HashMap<String, Properties>();
+    Map<String, Map<String,String>> dataSources = new HashMap<String, Map<String,String>>();
     
     NodeList dataConfigTags = xmlDocument.getElementsByTagName("dataConfig");
     if(dataConfigTags == null || dataConfigTags.getLength() == 0) {
@@ -232,16 +281,16 @@ public class DataImporter {
     List<Element> dataSourceTags = ConfigParseUtil.getChildNodes(e, DATA_SRC);
     if (!dataSourceTags.isEmpty()) {
       for (Element element : dataSourceTags) {
-        Properties p = new Properties();
+        Map<String,String> p = new HashMap<String,String>();
         HashMap<String, String> attrs = ConfigParseUtil.getAllAttributes(element);
         for (Map.Entry<String, String> entry : attrs.entrySet()) {
-          p.setProperty(entry.getKey(), entry.getValue());
+          p.put(entry.getKey(), entry.getValue());
         }
-        dataSources.put(p.getProperty("name"), p);
+        dataSources.put(p.get("name"), p);
       }
     }
     if(dataSources.get(null) == null){
-      for (Properties properties : dataSources.values()) {
+      for (Map<String,String> properties : dataSources.values()) {
         dataSources.put(null,properties);
         break;        
       } 
@@ -270,17 +319,17 @@ public class DataImporter {
   }
 
   DataSource getDataSourceInstance(Entity key, String name, Context ctx) {
-    Properties p = dataSourceProps.get(name);
+    Map<String,String> p = requestLevelDataSourceProps.get(name);
     if (p == null)
       p = config.getDataSources().get(name);
     if (p == null)
-      p = dataSourceProps.get(null);// for default data source
+      p = requestLevelDataSourceProps.get(null);// for default data source
     if (p == null)
       p = config.getDataSources().get(null);
     if (p == null)  
       throw new DataImportHandlerException(SEVERE,
               "No dataSource :" + name + " available for entity :" + key.getName());
-    String type = p.getProperty(TYPE);
+    String type = p.get(TYPE);
     DataSource dataSrc = null;
     if (type == null) {
       dataSrc = new JdbcDataSource();
@@ -458,6 +507,8 @@ public class DataImporter {
     public static final String DEBUG_NOT_ENABLED = "Debug not enabled. Add a tag <str name=\"enableDebug\">true</str> in solrconfig.xml";
 
     public static final String CONFIG_RELOADED = "Configuration Re-loaded sucessfully";
+    
+    public static final String CONFIG_NOT_RELOADED = "Configuration NOT Re-loaded...Data Importer is busy.";
 
     public static final String TOTAL_DOC_PROCESSED = "Total Documents Processed";
 
@@ -476,13 +527,16 @@ public class DataImporter {
     return schema;
   }
 
-  Map<String, Object> getCoreScopeSession() {
-    return coreScopeSession;
-  }
-
   SolrCore getCore() {
     return core;
   }
+  
+  void putToCoreScopeSession(String key, Object val) {
+    coreScopeSession.put(key, val);
+  }
+  Object getFromCoreScopeSession(String key) {
+    return coreScopeSession.get(key);
+  }
 
   public static final String COLUMN = "column";
 

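DataImporter now keeps the core-scope session in a ConcurrentHashMap and exposes
it only through putToCoreScopeSession/getFromCoreScopeSession, instead of handing
the raw Map to callers such as ContextImpl. A standalone sketch of that
encapsulation pattern (the class name here is hypothetical, not DIH code):

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    // Sketch: keep a core-scoped session in a ConcurrentHashMap and expose only
    // narrow put/get accessors, so callers cannot iterate over or swap out the
    // backing map while concurrent imports are writing to it.
    public class CoreScopeSession {

      private final Map<String, Object> session = new ConcurrentHashMap<String, Object>();

      public void put(String key, Object value) {
        session.put(key, value);
      }

      public Object get(String key) {
        return session.get(key);
      }

      public static void main(String[] args) {
        CoreScopeSession s = new CoreScopeSession();
        s.put("last_index_time", "2012-08-09 10:20:53");
        System.out.println(s.get("last_index_time"));
      }
    }

Hiding the map behind two accessors also means the backing collection can change,
as it does in this patch, without touching callers.
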
Modified: lucene/dev/branches/lucene3312/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RequestInfo.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene3312/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RequestInfo.java?rev=1371142&r1=1371141&r2=1371142&view=diff
==============================================================================
--- lucene/dev/branches/lucene3312/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RequestInfo.java (original)
+++ lucene/dev/branches/lucene3312/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RequestInfo.java Thu Aug  9 10:20:53 2012
@@ -36,6 +36,7 @@ public class RequestInfo {
   private final boolean clean; 
   private final List<String> entitiesToRun;
   private final Map<String,Object> rawParams;
+  private final String configFile;
   private final String dataConfig;  
   
   //TODO:  find a different home for these two...
@@ -98,7 +99,8 @@ public class RequestInfo {
     } else {
       entitiesToRun = null;
     }
-    
+    String configFileParam = (String) requestParams.get("config");
+    configFile = configFileParam;
     String dataConfigParam = (String) requestParams.get("dataConfig");
     if (dataConfigParam != null && dataConfigParam.trim().length() == 0) {
       // Empty data-config param is not valid, change it to null
@@ -161,4 +163,8 @@ public class RequestInfo {
   public DebugInfo getDebugInfo() {
     return debugInfo;
   }
+
+  public String getConfigFile() {
+    return configFile;
+  }
 }
\ No newline at end of file
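
RequestInfo above now records the request-level config file name next to the
inline dataConfig body, and DataImporter.maybeReloadConfiguration prefers the
inline body when both are supplied. A tiny standalone illustration of that
precedence (not DIH code; the class and method names are made up):

    // Sketch of the precedence applied when reloading the DIH configuration:
    // an inline dataConfig body wins over a config file name, and supplying
    // neither leaves the existing configuration in place.
    public class ConfigSourceChooser {

      static String chooseSource(String dataConfigText, String configFileName) {
        if (dataConfigText != null && dataConfigText.length() > 0) {
          return "inline dataConfig (" + dataConfigText.length() + " chars)";
        }
        if (configFileName != null) {
          return "config file: " + configFileName;
        }
        return null; // nothing supplied; keep whatever is already loaded
      }

      public static void main(String[] args) {
        System.out.println(chooseSource(null, "data-config.xml"));
        System.out.println(chooseSource("<dataConfig/>", "data-config.xml"));
      }
    }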