Posted to commits@lucene.apache.org by mi...@apache.org on 2012/08/04 23:26:15 UTC

svn commit: r1369470 [21/23] - in /lucene/dev/branches/pforcodec_3892: ./ dev-tools/ dev-tools/eclipse/ dev-tools/maven/ dev-tools/scripts/ lucene/ lucene/analysis/ lucene/analysis/common/ lucene/analysis/common/src/java/org/tartarus/snowball/ext/ luce...

Modified: lucene/dev/branches/pforcodec_3892/solr/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/CHANGES.txt?rev=1369470&r1=1369469&r2=1369470&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/CHANGES.txt (original)
+++ lucene/dev/branches/pforcodec_3892/solr/CHANGES.txt Sat Aug  4 21:26:10 2012
@@ -32,7 +32,7 @@ Apache Tika 1.1
 Carrot2 3.5.0
 Velocity 1.6.4 and Velocity Tools 2.0
 Apache UIMA 2.3.1
-Apache ZooKeeper 3.3.5
+Apache ZooKeeper 3.3.6
 
 Upgrading from Solr 4.0.0-ALPHA
 ----------------------
@@ -134,7 +134,7 @@ Bug Fixes
 * SOLR-1781: Replication index directories not always cleaned up. 
   (Markus Jelsma, Terje Sten Bjerkseth, Mark Miller)
 
-* SOLR-3639: Update ZooKeeper to 3.3.5 for a variety of bug fixes. (Mark Miller)
+* SOLR-3639: Update ZooKeeper to 3.3.6 for a variety of bug fixes. (Mark Miller)
 
 * SOLR-3629: Typo in solr.xml persistence when overriding the solrconfig.xml 
   file name using the "config" attribute prevented the override file from being
@@ -173,6 +173,9 @@ Bug Fixes
 * SOLR-3677: Fixed misleading error message in the web UI to distinguish between 
   no SolrCores loaded vs. no /admin/ handler available.
   (hossman, steffkes)
+  
+* SOLR-3428: SolrCmdDistributor flushAdds/flushDeletes can cause repeated 
+  adds/deletes to be sent (Mark Miller, Per Steffensen)
 
 Other Changes
 ----------------------
@@ -207,6 +210,14 @@ Other Changes
 
 * SOLR-3682: Fail to parse schema.xml if uniqueKeyField is multivalued (hossman)
 
+* SOLR-2115: DIH no longer requires the "config" parameter to be specified in solrconfig.xml.
+  Instead, the configuration is loaded and parsed with every import.  This allows the use of
+  a different configuration with each import, and makes correcting configuration errors simpler.
+  Also, the configuration itself can be passed using the "dataConfig" parameter rather than
+  using a file (this previously worked in debug mode only).  When configuration errors are 
+  encountered, the error message is returned in XML format.  (James Dyer)
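
[For illustration only; not part of this commit. A minimal SolrJ sketch of
passing the configuration inline via the "dataConfig" parameter described
above. The URL, core name, and data-config body are hypothetical; the HSQLDB
settings mirror the example-DIH setup mentioned later in this file.]

  import org.apache.solr.client.solrj.impl.HttpSolrServer;
  import org.apache.solr.common.params.ModifiableSolrParams;

  public class InlineDataConfigExample {
    public static void main(String[] args) throws Exception {
      HttpSolrServer server =
          new HttpSolrServer("http://localhost:8983/solr/collection1");
      ModifiableSolrParams params = new ModifiableSolrParams();
      params.set("qt", "/dataimport");      // route to the DIH request handler
      params.set("command", "full-import");
      // The whole configuration travels with the request; no "config"
      // entry in solrconfig.xml is needed any more.
      params.set("dataConfig",
          "<dataConfig>"
        + "<dataSource type=\"JdbcDataSource\" driver=\"org.hsqldb.jdbcDriver\""
        + " url=\"jdbc:hsqldb:./example-DIH/hsqldb/ex\" user=\"sa\"/>"
        + "<document><entity name=\"item\" query=\"select * from item\"/></document>"
        + "</dataConfig>");
      System.out.println(server.query(params));
    }
  }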
+  
+
 ==================  4.0.0-ALPHA ==================
 More information about this release, including any errata related to the 
 release notes, upgrade instructions, or other changes may be found online at:
@@ -709,6 +720,13 @@ Bug Fixes
 * SOLR-3470: contrib/clustering: custom Carrot2 tokenizer and stemmer factories
   are respected now (Stanislaw Osinski, Dawid Weiss)
 
+* SOLR-3430: Added a new DIH test against a real SQL database.  Fixed problems 
+  revealed by this new test related to the expanded cache support added to 
+  3.6/SOLR-2382 (James Dyer)
+             
+* SOLR-1958: When using the MailEntityProcessor, import would fail if 
+  fetchMailsSince was not specified. (Max Lynch via James Dyer) 
+
 
 Other Changes
 ----------------------
@@ -862,7 +880,13 @@ Other Changes
 * SOLR-3534: The Dismax and eDismax query parsers will fall back on the 'df' parameter
   when 'qf' is absent. If neither is present and the schema defines no default
   search field, an exception is now thrown. (dsmiley)
-  
+
+* SOLR-3262: The "threads" feature of DIH is removed (deprecated in Solr 3.6) 
+  (James Dyer)
+
+* SOLR-3422: Refactored DIH internal data classes.  All entities in 
+  data-config.xml must have a name (James Dyer)
+ 
 Documentation
 ----------------------
 
@@ -898,6 +922,17 @@ Bug Fixes:
 * SOLR-3470: contrib/clustering: custom Carrot2 tokenizer and stemmer factories
   are respected now (Stanislaw Osinski, Dawid Weiss)
 
+* SOLR-3360: More DIH bug fixes for the deprecated "threads" parameter.  
+  (Mikhail Khludnev, Claudio R, via James Dyer)
+
+* SOLR-3430: Added a new DIH test against a real SQL database.  Fixed problems 
+  revealed by this new test related to the expanded cache support added to 
+  3.6/SOLR-2382 (James Dyer)
+
+* SOLR-3336: SolrEntityProcessor substitutes most variables at query time.
+  (Michael Kroh, Lance Norskog, via Martijn van Groningen)
+
+
 ==================  3.6.0  ==================
 More information about this release, including any errata related to the 
 release notes, upgrade instructions, or other changes may be found online at:
@@ -1050,6 +1085,27 @@ New Features
   auto detector cannot detect encoding, especially when the text file is too 
   short to detect encoding. (koji)
 
+* SOLR-1499: Added SolrEntityProcessor that imports data from another Solr core
+  or instance based on a specified query.
+  (Lance Norskog, Erik Hatcher, Pulkit Singhal, Ahmet Arslan, Luca Cavanna, 
+  Martijn van Groningen)
+
+* SOLR-3190: Minor improvements to SolrEntityProcessor. Add more consistency 
+  between Solr parameters and the parameters used in SolrEntityProcessor, and 
+  the ability to specify a custom HttpClient instance.
+  (Luca Cavanna via Martijn van Groningen)
+
+* SOLR-2382: Added pluggable cache support to DIH so that any Entity can be 
+  made cache-able by adding the "cacheImpl" parameter.  Include 
+  "SortedMapBackedCache" to provide in-memory caching (as previously this was 
+  the only option when using CachedSqlEntityProcessor).  Users can provide 
+  their own implementations of DIHCache for other caching strategies.  
+  Deprecate CachedSqlEntityProcessor in favor of specifying "cacheImpl" with
+  SqlEntityProcessor.  Make SolrWriter implement DIHWriter and allow the 
+  possibility of pluggable Writers (DIH writing to something other than Solr). 
+  (James Dyer, Noble Paul)
+
+
 Optimizations
 ----------------------
 * SOLR-1931: Speedup for LukeRequestHandler and admin/schema browser. New parameter
@@ -1296,6 +1352,10 @@ Other Changes
   extracting request handler and are willing to use java 6, just add the jar. 
   (rmuir)
 
+* SOLR-3142: DIH imports no longer default "optimize" to true; the default is 
+  now false. If you want to force all segments to be merged into one, specify 
+  the parameter yourself. NOTE: this can be a very expensive operation and 
+  usually does not make sense for delta-imports.  (Robert Muir)
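
[A minimal sketch, not part of this commit, of restoring the old behavior by
passing "optimize" explicitly; the host and handler path are hypothetical.]

  import org.apache.solr.client.solrj.impl.HttpSolrServer;
  import org.apache.solr.common.params.ModifiableSolrParams;

  public class ExplicitOptimizeImport {
    public static void main(String[] args) throws Exception {
      HttpSolrServer server = new HttpSolrServer("http://localhost:8983/solr");
      ModifiableSolrParams p = new ModifiableSolrParams();
      p.set("qt", "/dataimport");
      p.set("command", "full-import");
      p.set("optimize", "true"); // the default is now false
      server.query(p);
    }
  }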
 
 Build
 ----------------------
@@ -1393,6 +1453,9 @@ Bug Fixes
   a wrong number of collation results in the response.
   (Bastiaan Verhoef, James Dyer via Simon Willnauer)
 
+* SOLR-2875: Fix the incorrect url in DIH example tika-data-config.xml 
+  (Shinichiro Abe via koji)
+
  Other Changes
 ----------------------
 
@@ -1585,6 +1648,24 @@ Bug Fixes
 * SOLR-2692: contrib/clustering: Typo in param name fixed: "carrot.fragzise" 
   changed to "carrot.fragSize" (Stanislaw Osinski).
 
+* SOLR-2644: When using DIH with threads=2 the default logging is set too high
+  (Bill Bell via shalin)
+
+* SOLR-2492: DIH does not commit if only deletes are processed 
+  (James Dyer via shalin)
+
+* SOLR-2186: DataImportHandler's multi-threaded option throws NPE 
+  (Lance Norskog, Frank Wesemann, shalin)
+
+* SOLR-2655: DIH multi threaded mode does not resolve attributes correctly 
+  (Frank Wesemann, shalin)
+
+* SOLR-2695: DIH: Documents are collected in unsynchronized list in 
+  multi-threaded debug mode (Michael McCandless, shalin)
+
+* SOLR-2668: DIH multithreaded mode does not rollback on errors from 
+  EntityProcessor (Frank Wesemann, shalin)
+
  Other Changes
 ----------------------
 
@@ -1697,6 +1778,9 @@ Bug Fixes
 * SOLR-2581: UIMAToSolrMapper wrongly instantiates Type with reflection.
   (Tommaso Teofili via koji)
 
+* SOLR-2551: Check dataimport.properties for write access (if delta-import is 
+  supported in DIH configuration) before starting an import (C S, shalin)
+
 Other Changes
 ----------------------
 
@@ -2141,6 +2225,30 @@ New Features
 
 * SOLR-2237: Added StempelPolishStemFilterFactory to contrib/analysis-extras (rmuir)
 
+* SOLR-1525: allow DIH to refer to core properties (noble)
+
+* SOLR-1547: DIH TemplateTransformer copies objects more intelligently when the 
+  template is a single variable (noble)
+
+* SOLR-1627: DIH VariableResolver should be fetched just in time (noble)
+
+* SOLR-1583: DIH: Create DataSources that return InputStream (noble)
+
+* SOLR-1358: Integration of Tika and DataImportHandler (Akshay Ukey, noble)
+
+* SOLR-1654: TikaEntityProcessor example added to example-DIH 
+  (Akshay Ukey via noble)
+
+* SOLR-1678: Move onError handling to DIH framework (noble)
+
+* SOLR-1352: Multi-threaded implementation of DIH (noble)
+
+* SOLR-1721: Add explicit option to run DataImportHandler in synchronous mode 
+  (Alexey Serba via noble)
+
+* SOLR-1737: Added FieldStreamDataSource (noble)
+
+
 Optimizations
 ----------------------
 
@@ -2166,6 +2274,9 @@ Optimizations
   SolrIndexSearcher.doc(int, Set<String>) method b/c it can use the document 
   cache (gsingers)
 
+* SOLR-2200: Improve the performance of DataImportHandler for large 
+  delta-import updates. (Mark Waddle via rmuir)
+
 Bug Fixes
 ----------------------
 * SOLR-1769: Solr 1.4 Replication - Repeater throwing NullPointerException (Jörgen Rydenius via noble)
@@ -2428,6 +2539,61 @@ Bug Fixes
   does not properly use the same iterator instance. 
   (Christoph Brill, Mark Miller)
 
+* SOLR-1638: Fixed NullPointerException during DIH import if uniqueKey is not 
+  specified in schema (Akshay Ukey via shalin)
+
+* SOLR-1639: Fixed misleading error message when dataimport.properties is not 
+  writable (shalin)
+
+* SOLR-1598: DIH: Reader used in PlainTextEntityProcessor is not explicitly 
+  closed (Sascha Szott via noble)
+
+* SOLR-1759: DIH: $skipDoc was not working correctly 
+  (Gian Marco Tagliani via noble)
+
+* SOLR-1762: DIH: DateFormatTransformer does not work correctly with 
+  non-default locale dates (tommy chheng via noble)
+
+* SOLR-1757: DIH multithreading sometimes throws NPE (noble)
+
+* SOLR-1766: DIH with threads enabled doesn't respond to the abort command 
+  (Michael Henson via noble)
+
+* SOLR-1767: dataimporter.functions.escapeSql() does not escape backslash 
+  character (Sean Timm via noble)
+
+* SOLR-1811: formatDate should use the current NOW value always 
+  (Sean Timm via noble)
+
+* SOLR-1794: Dataimport of CLOB fields fails when getCharacterStream() is 
+  defined in a superclass. (Gunnar Gauslaa Bergem via rmuir)
+
+* SOLR-2057: DataImportHandler never calls UpdateRequestProcessor.finish()
+  (Drew Farris via koji)
+
+* SOLR-1973: Empty fields in XML update messages confuse DataImportHandler. 
+  (koji)
+
+* SOLR-2221: Use StrUtils.parseBool() to get values of boolean options in DIH.
+  true/on/yes (for TRUE) and false/off/no (for FALSE) can be used for 
+  sub-options (debug, verbose, synchronous, commit, clean, optimize) for 
+  full/delta-import commands. (koji)
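
[A standalone sketch, not part of this commit, illustrating the accepted
values of StrUtils.parseBool() described above.]

  import org.apache.solr.common.util.StrUtils;

  public class ParseBoolDemo {
    public static void main(String[] args) {
      // "true"/"on"/"yes" parse to true; "false"/"off"/"no" parse to false.
      System.out.println(StrUtils.parseBool("on"));  // true
      System.out.println(StrUtils.parseBool("no"));  // false
    }
  }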
+
+* SOLR-2310: DIH: getTimeElapsedSince() returns incorrect hour value when 
+  the elapsed time is over 60 hours (tom liu via koji)
+
+* SOLR-2252: DIH: When a child entity in nested entities is rootEntity="true", 
+  delta-import doesn't work. (koji)
+
+* SOLR-2330: solrconfig.xml files in example-DIH are broken. (Matt Parker, koji)
+
+* SOLR-1191: resolve DataImportHandler deltaQuery column against pk when pk
+  has a prefix (e.g. pk="book.id" deltaQuery="select id from ..."). More
+  useful error reporting when no match found (previously failed with a
+  NullPointerException in log and no clear user feedback). (gthb via yonik)
+
+* SOLR-2116: Fix TikaConfig classloader bug in TikaEntityProcessor
+  (Martijn van Groningen via hossman)
 
 Other Changes
 ----------------------
@@ -2561,6 +2727,12 @@ Other Changes
 * SOLR-1813: Add ICU4j to contrib/extraction libs and add tests for Arabic 
   extraction (Robert Muir via gsingers)
 
+* SOLR-1821: Fix TimeZone-dependent test failure in TestEvaluatorBag.
+  (Chris Male via rmuir)
+
+* SOLR-2367: Reduced noise in test output by ensuring the properties file 
+  can be written. (Gunnlaugur Thor Briem via rmuir)
+
 Build
 ----------------------
 
@@ -2645,6 +2817,33 @@ error.  See SOLR-1410 for more informati
  * RussianLowerCaseFilterFactory
  * RussianLetterTokenizerFactory
 
+DIH: The Evaluator API has been changed in a non-backwards-compatible way. 
+Users who have developed custom Evaluators will need to change their code 
+according to the new API for it to work. See SOLR-996 for details.
+
+DIH: The formatDate evaluator's syntax has been changed. The new syntax is 
+formatDate(<variable>, '<format_string>'). For example, 
+formatDate(x.date, 'yyyy-MM-dd'). In the old syntax, the format string was 
+written without single quotes. The old syntax has been deprecated and will be 
+removed in 1.5; until then, using the old syntax logs a warning.
+
+DIH: The Context API has been changed in a non-backwards-compatible way. In 
+particular, the Context.currentProcess() method now returns a String describing 
+the type of the current import process instead of an int. Similarly, the public 
+constants FULL_DUMP, DELTA_DUMP and FIND_DELTA in Context are now Strings. See 
+SOLR-969 for details.
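
[A minimal sketch of the new String-based comparison; the class name is
hypothetical and this is not part of the commit.]

  import org.apache.solr.handler.dataimport.Context;

  public class ProcessCheck {
    // Post-SOLR-969: currentProcess() returns a String, so compare against
    // the String constants rather than the old int values.
    static boolean isFullImport(Context ctx) {
      return Context.FULL_DUMP.equals(ctx.currentProcess());
    }
  }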
+
+DIH: The EntityProcessor API has been simplified by moving the logic for 
+applying transformers and handling multi-row outputs from Transformers into an 
+EntityProcessorWrapper class. EntityProcessor#destroy is now called once per 
+parent row, after that row's data is exhausted. A new method, 
+EntityProcessor#close, is called once at the end of the import.
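
[A skeletal custom processor illustrating the lifecycle described above; the
class is hypothetical and not part of this commit.]

  import java.util.Map;
  import org.apache.solr.handler.dataimport.EntityProcessorBase;

  public class MyEntityProcessor extends EntityProcessorBase {
    @Override
    public Map<String, Object> nextRow() {
      return null; // null signals the end of this entity's data
    }
    @Override
    public void destroy() {
      // invoked once per parent row, after this entity's rows are exhausted
    }
    @Override
    public void close() {
      // invoked once, at the end of the whole import
    }
  }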
+
+DIH: In Solr 1.3, if the last_index_time was not available (first import) and 
+a delta-import was requested, a full-import was run instead. This is no longer 
+the case. In Solr 1.4, a delta-import is run with last_index_time set to the 
+epoch date (January 1, 1970, 00:00:00 GMT) if last_index_time is not available.
+
 Versions of Major Components
 ----------------------------
 Apache Lucene 2.9.1  (r832363  on 2.9 branch)
@@ -2936,6 +3135,141 @@ New Features
 86. SOLR-1274: Added text serialization output for extractOnly 
     (Peter Wolanin, gsingers)  
 
+87. SOLR-768: DIH: Set last_index_time variable in full-import command.
+    (Wojtek Piaseczny, Noble Paul via shalin)
+
+88. SOLR-811: Allow a "deltaImportQuery" attribute in SqlEntityProcessor 
+    which is used for delta imports instead of DataImportHandler manipulating 
+    the SQL itself. (Noble Paul via shalin)
+
+89. SOLR-842:  Better error handling in DataImportHandler with options to 
+    abort, skip and continue imports. (Noble Paul, shalin)
+
+90. SOLR-833: DIH: A DataSource to read data from a field as a reader. This 
+    can be used, for example, to read XMLs residing as CLOBs or BLOBs in 
+    databases. (Noble Paul via shalin)
+
+91. SOLR-887: A DIH Transformer to strip HTML tags. (Ahmed Hammad via shalin)
+
+92. SOLR-886: DataImportHandler should rollback when an import fails or it is 
+    aborted (shalin)
+
+93. SOLR-891: A DIH Transformer to read strings from Clob type. 
+    (Noble Paul via shalin)
+
+94. SOLR-812: Configurable JDBC settings in JdbcDataSource including optimized 
+    defaults for read only mode. (David Smiley, Glen Newton, shalin)
+
+95. SOLR-910: Add a few utility commands to the DIH admin page such as full 
+    import, delta import, status, reload config. (Ahmed Hammad via shalin)
+
+96. SOLR-938: Add event listener API for DIH import start and end.
+    (Kay Kay, Noble Paul via shalin)
+
+97. SOLR-801: DIH: Add support for configurable pre-import and post-import 
+    delete query per root-entity. (Noble Paul via shalin)
+
+98. SOLR-988: Add a new scope for session data stored in Context to store 
+    objects across imports. (Noble Paul via shalin)
+
+99. SOLR-980: A PlainTextEntityProcessor which can read from any 
+    DataSource<Reader> and output a String. 
+    (Nathan Adams, Noble Paul via shalin)
+
+100.SOLR-1003: XPathEntityProcessor must allow slurping all text from a given 
+    XML node and its children. (Noble Paul via shalin)
+
+101.SOLR-1001: Allow variables in various attributes of RegexTransformer, 
+    HTMLStripTransformer and NumberFormatTransformer.
+    (Fergus McMenemie, Noble Paul, shalin)
+
+102.SOLR-989: DIH: Expose running statistics from the Context API.
+    (Noble Paul, shalin)
+
+103.SOLR-996: DIH: Expose Context to Evaluators. (Noble Paul, shalin)
+
+104.SOLR-783: DIH: Enhance delta-imports by maintaining separate 
+    last_index_time for each entity. (Jon Baer, Noble Paul via shalin)
+
+105.SOLR-1033: Current entity's namespace is made available to all DIH 
+    Transformers. This allows one to use an output field of TemplateTransformer
+    in other transformers, among other things.
+    (Fergus McMenemie, Noble Paul via shalin)
+
+106.SOLR-1066: New methods in DIH Context to expose Script details. 
+    ScriptTransformer changed to read scripts through the new API methods.
+    (Noble Paul via shalin)
+
+107.SOLR-1062: A DIH LogTransformer which can log data in a given template 
+    format. (Jon Baer, Noble Paul via shalin)
+
+108.SOLR-1065: A DIH ContentStreamDataSource which can accept HTTP POST data 
+    in a content stream. This can be used to push data to Solr instead of 
+    just pulling it from DB/Files/URLs. (Noble Paul via shalin)
+
+109.SOLR-1061: Improve DIH RegexTransformer to create multiple columns from 
+    regex groups. (Noble Paul via shalin)
+
+110.SOLR-1059: Special DIH flags introduced for deleting documents by query or 
+    id, skipping rows and stopping further transforms. Use $deleteDocById, 
+    $deleteDocByQuery for deleting by id and query respectively.  Use $skipRow 
+    to skip the current row but continue with the document. Use $stopTransform 
+    to stop further transformers. New methods are introduced in Context for 
+    deleting by id and query. (Noble Paul, Fergus McMenemie, shalin)
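
[A minimal custom Transformer sketch using one of these flags; the "status"
column and class name are hypothetical, and this is not part of the commit.]

  import java.util.Map;
  import org.apache.solr.handler.dataimport.Context;
  import org.apache.solr.handler.dataimport.Transformer;

  public class SkipDraftsTransformer extends Transformer {
    @Override
    public Object transformRow(Map<String, Object> row, Context context) {
      if ("draft".equals(row.get("status"))) {
        row.put("$skipRow", "true"); // skip this row but keep the document
      }
      return row;
    }
  }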
+
+111.SOLR-1076: JdbcDataSource should resolve DIH variables in all its 
+    configuration parameters. (shalin)
+
+112.SOLR-1055: Make DIH JdbcDataSource easily extensible by making the 
+    createConnectionFactory method protected and having it return a 
+    Callable<Connection> object. (Noble Paul, shalin)
+
+113.SOLR-1058: DIH: JdbcDataSource can lookup javax.sql.DataSource using JNDI. 
+    Use a jndiName attribute to specify the location of the data source.
+    (Jason Shepherd, Noble Paul via shalin)
+
+114.SOLR-1083: A DIH Evaluator for escaping query characters. 
+    (Noble Paul, shalin)
+
+115.SOLR-934: A MailEntityProcessor to enable indexing mails from 
+    POP/IMAP sources into a Solr index. (Preetam Rao, shalin)
+
+116.SOLR-1060: A DIH LineEntityProcessor which can stream lines of text from a 
+    given file to be indexed directly or for processing with transformers and
+    child entities.
+    (Fergus McMenemie, Noble Paul, shalin)
+
+117.SOLR-1127: Add support for DIH field name to be templatized.
+    (Noble Paul, shalin)
+
+118.SOLR-1092: Added a new DIH command named 'import' which does not 
+    automatically clean the index. This is useful and more appropriate when one
+    needs to import only some of the entities.
+    (Noble Paul via shalin)
+              
+119.SOLR-1153: DIH 'deltaImportQuery' is honored on child entities as well 
+    (noble) 
+
+120.SOLR-1230: Enhanced dataimport.jsp to work with all DataImportHandler 
+    request handler configurations, rather than just a hardcoded /dataimport 
+    handler. (ehatcher)
+              
+121.SOLR-1235: disallow period (.) in DIH entity names (noble)
+
+122.SOLR-1234: Multiple DIH handlers do not work because all of them write to 
+    dataimport.properties. The handler name is now used as the properties file 
+    name (noble)
+
+123.SOLR-1348: Support binary field type in convertType logic in DIH 
+    JdbcDataSource (shalin)
+
+124.SOLR-1406: DIH: Make FileDataSource and FileListEntityProcessor more 
+    extensible (Luke Forehand, shalin)
+
+125.SOLR-1437: DIH: XPathEntityProcessor can deal with xpath syntaxes such as 
+    //tagname, /root//tagname (Fergus McMenemie via noble)
+
+
 Optimizations
 ----------------------
  1. SOLR-374: Use IndexReader.reopen to save resources by re-using parts of the
@@ -2993,6 +3327,21 @@ Optimizations
 17. SOLR-1296: Enables setting IndexReader's termInfosIndexDivisor via a new attribute to StandardIndexReaderFactory.  Enables
     setting termIndexInterval to IndexWriter via SolrIndexConfig. (Jason Rutherglen, hossman, gsingers)
 
+18. SOLR-846: DIH: Reduce memory consumption during delta import by removing 
+    keys when used (Ricky Leung, Noble Paul via shalin)
+
+19. SOLR-974: DataImportHandler skips commit if no data has been updated.
+    (Wojtek Piaseczny, shalin)
+
+20. SOLR-1004: DIH: Check for abort more frequently during delta-imports.
+    (Marc Sturlese, shalin)
+
+21. SOLR-1098: DIH DateFormatTransformer can cache the format objects.
+    (Noble Paul via shalin)
+
+22. SOLR-1465: Replaced string concatenations with StringBuilder append 
+    calls in DIH XPathRecordReader. (Mark Miller, shalin)
+
 Bug Fixes
 ----------------------
  1. SOLR-774: Fixed logging level display (Sean Timm via Otis Gospodnetic)
@@ -3210,6 +3559,103 @@ Bug Fixes
     caused an error to be returned, although the deletes were
     still executed.  (asmodean via yonik)
 
+76. SOLR-800: Deep copy collections to avoid ConcurrentModificationException 
+    in XPathEntityProcessor while streaming
+    (Kyle Morrison, Noble Paul via shalin)
+
+77. SOLR-823: Request parameter variables ${dataimporter.request.xxx} are not 
+    resolved in DIH (Mck SembWever, Noble Paul, shalin)
+
+78. SOLR-728: Add synchronization to avoid race condition of multiple DIH 
+    imports working concurrently (Walter Ferrara, shalin)
+
+79. SOLR-742: Add ability to create dynamic fields with custom 
+    DataImportHandler transformers (Wojtek Piaseczny, Noble Paul, shalin)
+
+80. SOLR-832: Rows parameter is not honored in DIH non-debug mode and can 
+    abort a running import in debug mode. (Akshay Ukey, shalin)
+
+81. SOLR-838: The DIH VariableResolver obtained from a DataSource's context 
+    does not have current data. (Noble Paul via shalin)
+
+82. SOLR-864: DataImportHandler does not catch and log Errors (shalin)
+
+83. SOLR-873: Fix case-sensitive field names and columns (Jon Baer, shalin)
+
+84. SOLR-893: Unable to delete documents via SQL and deletedPkQuery with 
+    delta-import (Dan Rosher via shalin)
+
+85. SOLR-888: DIH DateFormatTransformer cannot convert non-string type
+    (Amit Nithian via shalin)
+
+86. SOLR-841: DataImportHandler should throw exception if a field does not 
+    have column attribute (Michael Henson, shalin)
+
+87. SOLR-884: CachedSqlEntityProcessor should check if the cache key is 
+    present in the query results (Noble Paul via shalin)
+
+88. SOLR-985: Fix thread-safety issue with DIH TemplateString for concurrent 
+    imports with multiple cores. (Ryuuichi Kumai via shalin)
+
+89. SOLR-999: DIH XPathRecordReader fails on XMLs with nodes mixed with 
+    CDATA content. (Fergus McMenemie, Noble Paul via shalin)
+
+90. SOLR-1000: DIH FileListEntityProcessor should not apply fileName filter to 
+    directory names. (Fergus McMenemie via shalin)
+
+91. SOLR-1009: Repeated column names result in duplicate values. 
+    (Fergus McMenemie, Noble Paul via shalin)
+
+92. SOLR-1017: Fix DIH thread-safety issue with last_index_time for concurrent 
+    imports in multiple cores due to unsafe usage of SimpleDateFormat by 
+    multiple threads. (Ryuuichi Kumai via shalin)
+
+93. SOLR-1024: Calling abort on DataImportHandler import commits data instead 
+    of calling rollback. (shalin)
+
+94. SOLR-1037: DIH should not add null values in a row returned by 
+    EntityProcessor to documents. (shalin)
+
+95. SOLR-1040: DIH XPathEntityProcessor fails with an xpath like 
+    /feed/entry/link[@type='text/html']/@href (Noble Paul via shalin)
+
+96. SOLR-1042: Fix memory leak in DIH by making TemplateString non-static 
+    member in VariableResolverImpl (Ryuuichi Kumai via shalin)
+
+97. SOLR-1053: IndexOutOfBoundsException in DIH SolrWriter.getResourceAsString 
+    when size of data-config.xml is a multiple of 1024 bytes.
+    (Herb Jiang via shalin)
+
+98. SOLR-1077: IndexOutOfBoundsException with useSolrAddSchema in DIH 
+    XPathEntityProcessor. (Sam Keen, Noble Paul via shalin)
+
+99. SOLR-1080: DIH RegexTransformer should not replace if regex is not matched.
+    (Noble Paul, Fergus McMenemie via shalin)
+
+100.SOLR-1090: DataImportHandler should load the data-config.xml using UTF-8 
+    encoding. (Rui Pereira, shalin)
+
+101.SOLR-1146: ConcurrentModificationException in DataImporter.getStatusMessages
+    (Walter Ferrara, Noble Paul via shalin)
+
+102.SOLR-1229: Fixes for DIH deletedPkQuery, particularly when using 
+    transformed Solr unique IDs
+    (Lance Norskog, Noble Paul via ehatcher)
+              
+103.SOLR-1286: Fix the DIH commit parameter always defaulting to "true" even 
+    if "false" is explicitly passed in. (Jay Hill, Noble Paul via ehatcher)
+            
+104.SOLR-1323: Reset XPathEntityProcessor's $hasMore/$nextUrl when fetching 
+    next URL (noble, ehatcher)
+
+105.SOLR-1450: DIH: Jdbc connection properties such as batchSize are not 
+    applied if the driver jar is placed in solr_home/lib.
+    (Steve Sun via shalin)
+
+106.SOLR-1474: DIH Delta-import should run even if last_index_time is not set.
+    (shalin)
+
+
 Other Changes
 ----------------------
  1. Upgraded to Lucene 2.4.0 (yonik)
@@ -3357,6 +3803,55 @@ Other Changes
     for discussion on language detection.
     See http://www.apache.org/dist/lucene/tika/CHANGES-0.4.txt. (gsingers)
 
+53. SOLR-782: DIH: Refactored SolrWriter to make it a concrete class and 
+    removed wrappers over SolrInputDocument.  Refactored to load Evaluators 
+    lazily. Removed multiple document nodes in the configuration xml. Removed 
+    support for 'default' variables; they are automatically available as 
+    request parameters. (Noble Paul via shalin)
+
+54. SOLR-964: DIH: XPathEntityProcessor now ignores DTD validations
+    (Fergus McMenemie, Noble Paul via shalin)
+
+55. SOLR-1029: DIH: Standardized Evaluator parameter parsing and added helper 
+    functions for parsing all evaluator parameters in a standard way.
+    (Noble Paul, shalin)
+
+56. SOLR-1081: Change DIH EventListener to be an interface so that components 
+    such as an EntityProcessor or a Transformer can act as an event listener.
+    (Noble Paul, shalin)
+
+57. SOLR-1027: DIH: Alias the 'dataimporter' namespace to a shorter name 'dih'.
+    (Noble Paul via shalin)
+
+58. SOLR-1084: Better error reporting when DIH entity name is a reserved word 
+    and data-config.xml root node is not <dataConfig>.
+    (Noble Paul via shalin)
+
+59. SOLR-1087: Deprecate 'where' attribute in CachedSqlEntityProcessor in 
+    favor of cacheKey and cacheLookup. (Noble Paul via shalin)
+
+60. SOLR-969: Change the FULL_DUMP, DELTA_DUMP, FIND_DELTA constants in DIH 
+    Context to String.  Change Context.currentProcess() to return a string 
+    instead of an integer.  (Kay Kay, Noble Paul, shalin)
+
+61. SOLR-1120: Simplified DIH EntityProcessor API by moving logic for applying 
+    transformers and handling multi-row outputs from Transformers into an 
+    EntityProcessorWrapper class. The behavior of the method 
+    EntityProcessor#destroy has been modified to be called once per parent 
+    row, at the end of that row's data. A new method EntityProcessor#close is 
+    called at the end of import. A new method 
+    Context#getResolvedEntityAttribute is added which returns the resolved 
+    value of an entity's attribute. Introduced a DocWrapper which takes care 
+    of maintaining document level session variables.
+    (Noble Paul, shalin)
+
+62. SOLR-1265: Add DIH variable resolving for URLDataSource properties like 
+    baseUrl.  (Chris Eldredge via ehatcher)
+
+63. SOLR-1269: Better error messages from DIH JdbcDataSource when JDBC Driver 
+    name or SQL is incorrect. (ehatcher, shalin)
+
+
 Build
 ----------------------
  1. SOLR-776: Added in ability to sign artifacts via Ant for releases (gsingers)
@@ -3382,6 +3877,10 @@ Documentation
 
  3. SOLR-1409: Added Solr Powered By Logos    
 
+ 4. SOLR-1369: Add HSQLDB Jar to example-DIH, unzip database and update 
+    instructions.
+
+
 ================== Release 1.3.0 ==================
 
 Upgrading from Solr 1.2
@@ -3727,7 +4226,10 @@ New Features
 71. SOLR-1129: Support binding dynamic fields to beans in SolrJ (Avlesh Singh, noble)
 
 72. SOLR-920: Cache and reuse IndexSchema. A new attribute added in solr.xml called 'shareSchema' (noble)
-    
+
+73. SOLR-700: DIH: Allow configurable locales through a locale attribute in 
+    fields for NumberFormatTransformer. (Stefan Oestreicher, shalin)
+
 Changes in runtime behavior
  1. SOLR-559: use Lucene updateDocument, deleteDocuments methods.  This
     removes the maxBufferedDeletes parameter added by SOLR-310 as Lucene
@@ -3942,6 +4444,18 @@ Bug Fixes
 
 50. SOLR-749: Allow QParser and ValueSourceParsers to be extended with same name (hossman, gsingers)
 
+51. SOLR-704: DIH NumberFormatTransformer can silently ignore part of the 
+    string while parsing. Now it tries to use the complete string for parsing. 
+    Failure to do so will result in an exception.
+    (Stefan Oestreicher via shalin)
+
+52. SOLR-729: DIH Context.getDataSource(String) gives current entity's 
+    DataSource instance regardless of argument. (Noble Paul, shalin)
+
+53. SOLR-726: DIH: Jdbc Drivers and DataSources fail to load if placed in 
+    multicore sharedLib or core's lib directory.
+    (Walter Ferrara, Noble Paul, shalin)
+
 Other Changes
  1. SOLR-135: Moved common classes to org.apache.solr.common and altered the
     build scripts to make two jars: apache-solr-1.3.jar and 

Modified: lucene/dev/branches/pforcodec_3892/solr/build.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/build.xml?rev=1369470&r1=1369469&r2=1369470&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/build.xml (original)
+++ lucene/dev/branches/pforcodec_3892/solr/build.xml Sat Aug  4 21:26:10 2012
@@ -402,11 +402,11 @@
                   prefix="${fullnamever}"
                   includes="LICENSE.txt NOTICE.txt CHANGES.txt README.txt example/**
                              client/README.txt client/ruby/solr-ruby/** contrib/**/lib/**
-                             contrib/**/README.txt contrib/**/CHANGES.txt"
+                             contrib/**/README.txt licenses/**"
                   excludes="lib/README.committers.txt **/data/ **/logs/* 
                             **/classes/ **/*.sh **/ivy.xml **/build.xml
                             **/bin/ **/*.iml **/*.ipr **/*.iws **/pom.xml 
-                            **/*pom.xml.template **/*.sha1" />
+                            **/*pom.xml.template" />
       <tarfileset dir="${dest}/contrib-lucene-libs-to-package"
                   prefix="${fullnamever}"
                   includes="**" />
@@ -763,4 +763,8 @@
      </delete>
   </target>
 
+  <target name="jar-checksums" depends="clean-jars,resolve">
+    <jar-checksum-macro srcdir="${common-solr.dir}" dstdir="${common-solr.dir}/licenses"/>
+  </target>
+
 </project>

Modified: lucene/dev/branches/pforcodec_3892/solr/cloud-dev/example1.sh
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/cloud-dev/example1.sh?rev=1369470&r1=1369469&r2=1369470&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/cloud-dev/example1.sh (original)
+++ lucene/dev/branches/pforcodec_3892/solr/cloud-dev/example1.sh Sat Aug  4 21:26:10 2012
@@ -7,6 +7,7 @@ rm -r -f example2
 rm -r -f dist
 rm -r -f build
 rm -r -f example/solr/zoo_data
+rm -r -f example/solr/collection1/data
 rm -f example/example.log
 
 ant example dist

Modified: lucene/dev/branches/pforcodec_3892/solr/cloud-dev/example2.sh
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/cloud-dev/example2.sh?rev=1369470&r1=1369469&r2=1369470&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/cloud-dev/example2.sh (original)
+++ lucene/dev/branches/pforcodec_3892/solr/cloud-dev/example2.sh Sat Aug  4 21:26:10 2012
@@ -9,6 +9,7 @@ rm -r -f example4
 rm -r -f dist
 rm -r -f build
 rm -r -f example/solr/zoo_data
+rm -r -f example/solr/collection1/data
 rm -f example/example.log
 
 ant example dist

Modified: lucene/dev/branches/pforcodec_3892/solr/cloud-dev/example3.sh
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/cloud-dev/example3.sh?rev=1369470&r1=1369469&r2=1369470&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/cloud-dev/example3.sh (original)
+++ lucene/dev/branches/pforcodec_3892/solr/cloud-dev/example3.sh Sat Aug  4 21:26:10 2012
@@ -9,6 +9,7 @@ rm -r -f example4
 rm -r -f dist
 rm -r -f build
 rm -r -f example/solr/zoo_data
+rm -r -f example/solr/collection1/data
 rm -f example/example.log
 
 ant example dist

Modified: lucene/dev/branches/pforcodec_3892/solr/cloud-dev/solrcloud-extzk-start.sh
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/cloud-dev/solrcloud-extzk-start.sh?rev=1369470&r1=1369469&r2=1369470&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/cloud-dev/solrcloud-extzk-start.sh (original)
+++ lucene/dev/branches/pforcodec_3892/solr/cloud-dev/solrcloud-extzk-start.sh Sat Aug  4 21:26:10 2012
@@ -13,7 +13,7 @@ rm -r -f example6
 rm -r -f dist
 rm -r -f build
 rm -r -f example/solr/zoo_data
-rm -r -f example/solr/data
+rm -r -f example/solr/collection1/data
 rm -f example/example.log
 
 ant example dist

Modified: lucene/dev/branches/pforcodec_3892/solr/cloud-dev/solrcloud-multi-start.sh
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/cloud-dev/solrcloud-multi-start.sh?rev=1369470&r1=1369469&r2=1369470&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/cloud-dev/solrcloud-multi-start.sh (original)
+++ lucene/dev/branches/pforcodec_3892/solr/cloud-dev/solrcloud-multi-start.sh Sat Aug  4 21:26:10 2012
@@ -13,7 +13,7 @@ rm -r -f example6
 rm -r -f dist
 rm -r -f build
 rm -r -f example/solr/zoo_data
-rm -r -f example/solr/data
+rm -r -f example/solr/collection1/data
 rm -f example/example.log
 
 ant example dist

Modified: lucene/dev/branches/pforcodec_3892/solr/cloud-dev/solrcloud-start-existing.sh
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/cloud-dev/solrcloud-start-existing.sh?rev=1369470&r1=1369469&r2=1369470&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/cloud-dev/solrcloud-start-existing.sh (original)
+++ lucene/dev/branches/pforcodec_3892/solr/cloud-dev/solrcloud-start-existing.sh Sat Aug  4 21:26:10 2012
@@ -2,9 +2,6 @@
 
 cd ..
 
-rm -r -f dist
-rm -r -f build
-
 cd example
 java -DzkRun -DSTOP.PORT=7983 -DSTOP.KEY=key -jar start.jar 1>example.log 2>&1 &
 

Modified: lucene/dev/branches/pforcodec_3892/solr/cloud-dev/solrcloud-start.sh
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/cloud-dev/solrcloud-start.sh?rev=1369470&r1=1369469&r2=1369470&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/cloud-dev/solrcloud-start.sh (original)
+++ lucene/dev/branches/pforcodec_3892/solr/cloud-dev/solrcloud-start.sh Sat Aug  4 21:26:10 2012
@@ -11,7 +11,7 @@ rm -r -f example6
 rm -r -f dist
 rm -r -f build
 rm -r -f example/solr/zoo_data
-rm -r -f example/solr/data
+rm -r -f example/solr/collection1/data
 rm -f example/example.log
 
 ant example dist

Modified: lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/README.txt
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/README.txt?rev=1369470&r1=1369469&r2=1369470&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/README.txt (original)
+++ lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/README.txt Sat Aug  4 21:26:10 2012
@@ -1,3 +1,12 @@
+                    Apache Solr - DataImportHandler
+
+Introduction
+------------
+DataImportHandler is a data import tool for Solr which makes importing data from databases, XML files and
+HTTP data sources quick and easy.
+
+Important Note
+--------------
 Although Solr strives to be agnostic of the Locale where the server is
 running, some code paths in DataImportHandler are known to depend on the
 System default Locale, Timezone, or Charset.  It is recommended that when

Modified: lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ContextImpl.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ContextImpl.java?rev=1369470&r1=1369469&r2=1369470&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ContextImpl.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ContextImpl.java Sat Aug  4 21:26:10 2012
@@ -152,7 +152,7 @@ public class ContextImpl extends Context
       }
     } else if (SCOPE_SOLR_CORE.equals(scope)){
       if(dataImporter != null) {
-        dataImporter.getCoreScopeSession().put(name, val);
+        dataImporter.putToCoreScopeSession(name, val);
       }
     }
   }
@@ -171,7 +171,7 @@ public class ContextImpl extends Context
       DocBuilder.DocWrapper doc = getDocument();      
       return doc == null ? null: doc.getSessionAttribute(name);
     } else if (SCOPE_SOLR_CORE.equals(scope)){
-       return dataImporter == null ? null : dataImporter.getCoreScopeSession().get(name);
+       return dataImporter == null ? null : dataImporter.getFromCoreScopeSession(name);
     }
     return null;
   }

Modified: lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java?rev=1369470&r1=1369469&r2=1369470&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java Sat Aug  4 21:26:10 2012
@@ -71,14 +71,10 @@ public class DataImportHandler extends R
 
   private DataImporter importer;
 
-  private Map<String, Properties> dataSources = new HashMap<String, Properties>();
-
   private boolean debugEnabled = true;
 
   private String myName = "dataimport";
 
-  private Map<String , Object> coreScopeSession = new HashMap<String, Object>();
-
   @Override
   @SuppressWarnings("unchecked")
   public void init(NamedList args) {
@@ -102,21 +98,10 @@ public class DataImportHandler extends R
         }
       }
       debugEnabled = StrUtils.parseBool((String)initArgs.get(ENABLE_DEBUG), true);
-      NamedList defaults = (NamedList) initArgs.get("defaults");
-      if (defaults != null) {
-        String configLoc = (String) defaults.get("config");
-        if (configLoc != null && configLoc.length() != 0) {
-          processConfiguration(defaults);
-          final InputSource is = new InputSource(core.getResourceLoader().openResource(configLoc));
-          is.setSystemId(SystemIdResolver.createSystemIdFromResourceName(configLoc));
-          importer = new DataImporter(is, core,
-                  dataSources, coreScopeSession, myName);
-        }
-      }
+      importer = new DataImporter(core, myName);         
     } catch (Throwable e) {
       LOG.error( DataImporter.MSG.LOAD_EXP, e);
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
-              DataImporter.MSG.INVALID_CONFIG, e);
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, DataImporter.MSG.LOAD_EXP, e);
     }
   }
 
@@ -136,48 +121,35 @@ public class DataImportHandler extends R
       }
     }
     SolrParams params = req.getParams();
+    NamedList defaultParams = (NamedList) initArgs.get("defaults");
     RequestInfo requestParams = new RequestInfo(getParamsMap(params), contentStream);
     String command = requestParams.getCommand();
-   
     
-    if (DataImporter.SHOW_CONF_CMD.equals(command)) {
-      // Modify incoming request params to add wt=raw
-      ModifiableSolrParams rawParams = new ModifiableSolrParams(req.getParams());
-      rawParams.set(CommonParams.WT, "raw");
-      req.setParams(rawParams);
-      String dataConfigFile = defaults.get("config");
-      ContentStreamBase content = new ContentStreamBase.StringStream(SolrWriter
-              .getResourceAsString(req.getCore().getResourceLoader().openResource(
-              dataConfigFile)));
-      rsp.add(RawResponseWriter.CONTENT, content);
+    if (DataImporter.SHOW_CONF_CMD.equals(command)) {    
+      String dataConfigFile = params.get("config");
+      String dataConfig = params.get("dataConfig");
+      if(dataConfigFile != null) {
+        dataConfig = SolrWriter.getResourceAsString(req.getCore().getResourceLoader().openResource(dataConfigFile));
+      }
+      if(dataConfig==null)  {
+        rsp.add("status", DataImporter.MSG.NO_CONFIG_FOUND);
+      } else {
+        // Modify incoming request params to add wt=raw
+        ModifiableSolrParams rawParams = new ModifiableSolrParams(req.getParams());
+        rawParams.set(CommonParams.WT, "raw");
+        req.setParams(rawParams);
+        ContentStreamBase content = new ContentStreamBase.StringStream(dataConfig);
+        rsp.add(RawResponseWriter.CONTENT, content);
+      }
       return;
     }
 
     rsp.add("initArgs", initArgs);
     String message = "";
 
-    if (command != null)
+    if (command != null) {
       rsp.add("command", command);
-
-    if (requestParams.isDebug() && (importer == null || !importer.isBusy())) {
-      // Reload the data-config.xml
-      importer = null;
-      if (requestParams.getDataConfig() != null) {
-        try {
-          processConfiguration((NamedList) initArgs.get("defaults"));
-          importer = new DataImporter(new InputSource(new StringReader(requestParams.getDataConfig())), req.getCore()
-                  , dataSources, coreScopeSession, myName);
-        } catch (RuntimeException e) {
-          rsp.add("exception", DebugLogger.getStacktraceString(e));
-          importer = null;
-          return;
-        }
-      } else {
-        inform(req.getCore());
-      }
-      message = DataImporter.MSG.CONFIG_RELOADED;
     }
-
     // If importer is still null
     if (importer == null) {
       rsp.add("status", DataImporter.MSG.NO_INIT);
@@ -192,7 +164,7 @@ public class DataImportHandler extends R
       if (DataImporter.FULL_IMPORT_CMD.equals(command)
               || DataImporter.DELTA_IMPORT_CMD.equals(command) ||
               IMPORT_CMD.equals(command)) {
-
+        importer.maybeReloadConfiguration(requestParams, defaultParams);
         UpdateRequestProcessorChain processorChain =
                 req.getCore().getUpdateProcessingChain(params.get(UpdateParams.UPDATE_CHAIN));
         UpdateRequestProcessor processor = processorChain.createProcessor(req, rsp);
@@ -219,10 +191,12 @@ public class DataImportHandler extends R
             importer.runCmd(requestParams, sw);
           }
         }
-      } else if (DataImporter.RELOAD_CONF_CMD.equals(command)) {
-        importer = null;
-        inform(req.getCore());
-        message = DataImporter.MSG.CONFIG_RELOADED;
+      } else if (DataImporter.RELOAD_CONF_CMD.equals(command)) { 
+        if(importer.maybeReloadConfiguration(requestParams, defaultParams)) {
+          message = DataImporter.MSG.CONFIG_RELOADED;
+        } else {
+          message = DataImporter.MSG.CONFIG_NOT_RELOADED;
+        }
       }
     }
     rsp.add("status", importer.isBusy() ? "busy" : "idle");
@@ -248,36 +222,6 @@ public class DataImportHandler extends R
     return result;
   }
 
-  @SuppressWarnings("unchecked")
-  private void processConfiguration(NamedList defaults) {
-    if (defaults == null) {
-      LOG.info("No configuration specified in solrconfig.xml for DataImportHandler");
-      return;
-    }
-
-    LOG.info("Processing configuration from solrconfig.xml: " + defaults);
-
-    dataSources = new HashMap<String, Properties>();
-
-    int position = 0;
-
-    while (position < defaults.size()) {
-      if (defaults.getName(position) == null)
-        break;
-
-      String name = defaults.getName(position);
-      if (name.equals("datasource")) {
-        NamedList dsConfig = (NamedList) defaults.getVal(position);
-        Properties props = new Properties();
-        for (int i = 0; i < dsConfig.size(); i++)
-          props.put(dsConfig.getName(i), dsConfig.getVal(i));
-        LOG.info("Adding properties to datasource: " + props);
-        dataSources.put((String) dsConfig.get("name"), props);
-      }
-      position++;
-    }
-  }
-
   private SolrWriter getSolrWriter(final UpdateRequestProcessor processor,
                                    final SolrResourceLoader loader, final RequestInfo requestParams, SolrQueryRequest req) {
 

Modified: lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java?rev=1369470&r1=1369469&r2=1369470&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java Sat Aug  4 21:26:10 2012
@@ -22,6 +22,8 @@ import org.apache.solr.core.SolrCore;
 import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.schema.SchemaField;
 import org.apache.solr.util.SystemIdResolver;
+import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.XMLErrorLogger;
 import org.apache.solr.handler.dataimport.config.ConfigNameConstants;
 import org.apache.solr.handler.dataimport.config.ConfigParseUtil;
@@ -41,9 +43,12 @@ import org.apache.commons.io.IOUtils;
 
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
+
+import java.io.IOException;
 import java.io.StringReader;
 import java.text.SimpleDateFormat;
 import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.locks.ReentrantLock;
 
@@ -67,14 +72,14 @@ public class DataImporter {
   private DIHConfiguration config;
   private Date indexStartTime;
   private Properties store = new Properties();
-  private Map<String, Properties> dataSourceProps = new HashMap<String, Properties>();
+  private Map<String, Map<String,String>> requestLevelDataSourceProps = new HashMap<String, Map<String,String>>();
   private IndexSchema schema;
   public DocBuilder docBuilder;
   public DocBuilder.Statistics cumulativeStatistics = new DocBuilder.Statistics();
   private SolrCore core;  
+  private Map<String, Object> coreScopeSession = new ConcurrentHashMap<String,Object>();
   private DIHPropertiesWriter propWriter;
   private ReentrantLock importLock = new ReentrantLock();
-  private final Map<String , Object> coreScopeSession;
   private boolean isDeltaImportSupported = false;  
   private final String handlerName;  
   private Map<String, SchemaField> lowerNameVsSchemaField = new HashMap<String, SchemaField>();
@@ -83,12 +88,19 @@ public class DataImporter {
    * Only for testing purposes
    */
   DataImporter() {
-    coreScopeSession = new HashMap<String, Object>();
     createPropertyWriter();
     propWriter.init(this);
     this.handlerName = "dataimport" ;
   }
-
+  
+  DataImporter(SolrCore core, String handlerName) {
+    this.handlerName = handlerName;
+    this.core = core;
+    this.schema = core.getSchema();
+    loadSchemaFieldMap();
+    createPropertyWriter();    
+  }
+  
   private void createPropertyWriter() {
     if (this.core == null
         || !this.core.getCoreDescriptor().getCoreContainer().isZooKeeperAware()) {
@@ -99,27 +111,58 @@ public class DataImporter {
     propWriter.init(this);
   }
 
-  DataImporter(InputSource dataConfig, SolrCore core, Map<String, Properties> ds, Map<String, Object> session, String handlerName) {
-    this.handlerName = handlerName;
-    if (dataConfig == null) {
-      throw new DataImportHandlerException(SEVERE, "Configuration not found");
-    }
-    this.core = core;
-    this.schema = core.getSchema();
-    loadSchemaFieldMap();
-    createPropertyWriter();
-    
-    dataSourceProps = ds;
-    if (session == null)
-      session = new HashMap<String, Object>();
-    coreScopeSession = session;
-    loadDataConfig(dataConfig);
-   
-    for (Entity e : config.getEntities()) {
-      if (e.getAllAttributes().containsKey(SqlEntityProcessor.DELTA_QUERY)) {
-        isDeltaImportSupported = true;
-        break;
+  
+  boolean maybeReloadConfiguration(RequestInfo params,
+      NamedList<?> defaultParams) throws IOException {
+    if (importLock.tryLock()) {
+      boolean success = false;
+      try {        
+        String dataConfigText = params.getDataConfig();
+        String dataconfigFile = (String) params.getConfigFile();        
+        InputSource is = null;
+        if(dataConfigText!=null && dataConfigText.length()>0) {
+          is = new InputSource(new StringReader(dataConfigText));
+        } else if(dataconfigFile!=null) {
+          is = new InputSource(core.getResourceLoader().openResource(dataconfigFile));
+          is.setSystemId(SystemIdResolver.createSystemIdFromResourceName(dataconfigFile));
+          LOG.info("Loading DIH Configuration: " + dataconfigFile);
+        }
+        if(is!=null) {          
+          loadDataConfig(is);
+          success = true;
+        }      
+        
+        Map<String,Map<String,String>> dsProps = new HashMap<String,Map<String,String>>();
+        if(defaultParams!=null) {
+          int position = 0;
+          while (position < defaultParams.size()) {
+            if (defaultParams.getName(position) == null) {
+              break;
+            }
+            String name = defaultParams.getName(position);            
+            if (name.equals("datasource")) {
+              success = true;
+              NamedList dsConfig = (NamedList) defaultParams.getVal(position);
+              LOG.info("Getting configuration for Global Datasource...");              
+              Map<String,String> props = new HashMap<String,String>();
+              for (int i = 0; i < dsConfig.size(); i++) {
+                props.put(dsConfig.getName(i), dsConfig.getVal(i).toString());
+              }
+              LOG.info("Adding properties to datasource: " + props);
+              dsProps.put((String) dsConfig.get("name"), props);
+            }
+            position++;
+          }
+        }
+        requestLevelDataSourceProps = Collections.unmodifiableMap(dsProps);
+      } catch(IOException ioe) {
+        throw ioe;
+      } finally {
+        importLock.unlock();
       }
+      return success;
+    } else {
+      return false;
     }
   }
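
[The reload path above uses ReentrantLock.tryLock() so that a reload attempt
made while an import is running returns false instead of blocking. A distilled
sketch of the same pattern, with generic names; this is not the committed code.]

  import java.util.concurrent.locks.ReentrantLock;

  public class ReloadGuard {
    private final ReentrantLock importLock = new ReentrantLock();

    boolean maybeReload(Runnable loader) {
      if (!importLock.tryLock()) {
        return false; // an import holds the lock; keep the old configuration
      }
      try {
        loader.run();
        return true;
      } finally {
        importLock.unlock();
      }
    }
  }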
   
@@ -188,7 +231,13 @@ public class DataImporter {
       LOG.info("Data Configuration loaded successfully");
     } catch (Exception e) {
       throw new DataImportHandlerException(SEVERE,
-              "Exception occurred while initializing context", e);
+              "Data Config problem: " + e.getMessage(), e);
+    }
+    for (Entity e : config.getEntities()) {
+      if (e.getAllAttributes().containsKey(SqlEntityProcessor.DELTA_QUERY)) {
+        isDeltaImportSupported = true;
+        break;
+      }
     }
   }
   
@@ -196,7 +245,7 @@ public class DataImporter {
     DIHConfiguration config;
     List<Map<String, String >> functions = new ArrayList<Map<String ,String>>();
     Script script = null;
-    Map<String, Properties> dataSources = new HashMap<String, Properties>();
+    Map<String, Map<String,String>> dataSources = new HashMap<String, Map<String,String>>();
     
     NodeList dataConfigTags = xmlDocument.getElementsByTagName("dataConfig");
     if(dataConfigTags == null || dataConfigTags.getLength() == 0) {
@@ -232,16 +281,16 @@ public class DataImporter {
     List<Element> dataSourceTags = ConfigParseUtil.getChildNodes(e, DATA_SRC);
     if (!dataSourceTags.isEmpty()) {
       for (Element element : dataSourceTags) {
-        Properties p = new Properties();
+        Map<String,String> p = new HashMap<String,String>();
         HashMap<String, String> attrs = ConfigParseUtil.getAllAttributes(element);
         for (Map.Entry<String, String> entry : attrs.entrySet()) {
-          p.setProperty(entry.getKey(), entry.getValue());
+          p.put(entry.getKey(), entry.getValue());
         }
-        dataSources.put(p.getProperty("name"), p);
+        dataSources.put(p.get("name"), p);
       }
     }
     if(dataSources.get(null) == null){
-      for (Properties properties : dataSources.values()) {
+      for (Map<String,String> properties : dataSources.values()) {
         dataSources.put(null,properties);
         break;        
       } 
@@ -270,17 +319,17 @@ public class DataImporter {
   }
 
   DataSource getDataSourceInstance(Entity key, String name, Context ctx) {
-    Properties p = dataSourceProps.get(name);
+    Map<String,String> p = requestLevelDataSourceProps.get(name);
     if (p == null)
       p = config.getDataSources().get(name);
     if (p == null)
-      p = dataSourceProps.get(null);// for default data source
+      p = requestLevelDataSourceProps.get(null);// for default data source
     if (p == null)
       p = config.getDataSources().get(null);
     if (p == null)  
       throw new DataImportHandlerException(SEVERE,
               "No dataSource :" + name + " available for entity :" + key.getName());
-    String type = p.getProperty(TYPE);
+    String type = p.get(TYPE);
     DataSource dataSrc = null;
     if (type == null) {
       dataSrc = new JdbcDataSource();
@@ -458,6 +507,8 @@ public class DataImporter {
     public static final String DEBUG_NOT_ENABLED = "Debug not enabled. Add a tag <str name=\"enableDebug\">true</str> in solrconfig.xml";
 
    public static final String CONFIG_RELOADED = "Configuration Re-loaded successfully";
+    
+    public static final String CONFIG_NOT_RELOADED = "Configuration NOT Re-loaded...Data Importer is busy.";
 
     public static final String TOTAL_DOC_PROCESSED = "Total Documents Processed";
 
@@ -476,13 +527,16 @@ public class DataImporter {
     return schema;
   }
 
-  Map<String, Object> getCoreScopeSession() {
-    return coreScopeSession;
-  }
-
   SolrCore getCore() {
     return core;
   }
+  
+  void putToCoreScopeSession(String key, Object val) {
+    coreScopeSession.put(key, val);
+  }
+  Object getFromCoreScopeSession(String key) {
+    return coreScopeSession.get(key);
+  }
 
   public static final String COLUMN = "column";
 

Modified: lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RequestInfo.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RequestInfo.java?rev=1369470&r1=1369469&r2=1369470&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RequestInfo.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RequestInfo.java Sat Aug  4 21:26:10 2012
@@ -36,6 +36,7 @@ public class RequestInfo {
   private final boolean clean; 
   private final List<String> entitiesToRun;
   private final Map<String,Object> rawParams;
+  private final String configFile;
   private final String dataConfig;  
   
   //TODO:  find a different home for these two...
@@ -98,7 +99,8 @@ public class RequestInfo {
     } else {
       entitiesToRun = null;
     }
-    
+    String configFileParam = (String) requestParams.get("config");
+    configFile = configFileParam;
     String dataConfigParam = (String) requestParams.get("dataConfig");
     if (dataConfigParam != null && dataConfigParam.trim().length() == 0) {
       // Empty data-config param is not valid, change it to null
@@ -161,4 +163,8 @@ public class RequestInfo {
   public DebugInfo getDebugInfo() {
     return debugInfo;
   }
+
+  public String getConfigFile() {
+    return configFile;
+  }
 }
\ No newline at end of file

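The RequestInfo hunks above are what let each import request name its own configuration: the raw "config" parameter is captured next to "dataConfig" and exposed through getConfigFile(). A minimal sketch of that capture, with a hypothetical class name:

    import java.util.HashMap;
    import java.util.Map;

    // Illustrative stand-in for the RequestInfo change: the "config"
    // request parameter is stored as-is and read back via a getter.
    public class RequestInfoSketch {
      private final String configFile;

      RequestInfoSketch(Map<String,Object> requestParams) {
        this.configFile = (String) requestParams.get("config");
      }

      public String getConfigFile() {
        return configFile;
      }

      public static void main(String[] args) {
        Map<String,Object> params = new HashMap<String,Object>();
        params.put("config", "data-config-end-to-end.xml");
        System.out.println(new RequestInfoSketch(params).getConfigFile());
      }
    }
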
Modified: lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/DIHConfiguration.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/DIHConfiguration.java?rev=1369470&r1=1369469&r2=1369470&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/DIHConfiguration.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/DIHConfiguration.java Sat Aug  4 21:26:10 2012
@@ -4,7 +4,6 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
-import java.util.Properties;
 
 import org.apache.solr.handler.dataimport.DataImporter;
 import org.w3c.dom.Element;
@@ -49,8 +48,8 @@ public class DIHConfiguration {
   private final String onImportEnd;
   private final List<Map<String, String>> functions;
   private final Script script;
-  private final Map<String, Properties> dataSources;
-  public DIHConfiguration(Element element, DataImporter di, List<Map<String, String>> functions, Script script, Map<String, Properties> dataSources) {
+  private final Map<String, Map<String,String>> dataSources;
+  public DIHConfiguration(Element element, DataImporter di, List<Map<String, String>> functions, Script script, Map<String, Map<String,String>> dataSources) {
     this.deleteQuery = ConfigParseUtil.getStringAttribute(element, "deleteQuery", null);
     this.onImportStart = ConfigParseUtil.getStringAttribute(element, "onImportStart", null);
     this.onImportEnd = ConfigParseUtil.getStringAttribute(element, "onImportEnd", null);
@@ -90,7 +89,7 @@ public class DIHConfiguration {
   public List<Map<String,String>> getFunctions() {
     return functions;
   }
-  public Map<String,Properties> getDataSources() {
+  public Map<String,Map<String,String>> getDataSources() {
     return dataSources;
   }
   public Script getScript() {

Modified: lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/test-files/dih/solr/collection1/conf/dataimport-solrconfig-end-to-end.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/test-files/dih/solr/collection1/conf/dataimport-solrconfig-end-to-end.xml?rev=1369470&r1=1369469&r2=1369470&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/test-files/dih/solr/collection1/conf/dataimport-solrconfig-end-to-end.xml (original)
+++ lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/test-files/dih/solr/collection1/conf/dataimport-solrconfig-end-to-end.xml Sat Aug  4 21:26:10 2012
@@ -31,11 +31,7 @@
        <str name="echoParams">explicit</str>       
     </lst>
   </requestHandler>  
-  <requestHandler name="/dataimport-end-to-end" class="org.apache.solr.handler.dataimport.DataImportHandler">
-    <lst name="defaults">
-  	  <str name="config">data-config-end-to-end.xml</str>
-    </lst>
-  </requestHandler>   
+  <requestHandler name="/dataimport-end-to-end" class="org.apache.solr.handler.dataimport.DataImportHandler" />  
   <requestHandler name="/search" class="org.apache.solr.handler.component.SearchHandler">
     <lst name="defaults">
       <str name="echoParams">explicit</str>

Modified: lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDIHEndToEnd.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDIHEndToEnd.java?rev=1369470&r1=1369469&r2=1369470&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDIHEndToEnd.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestDIHEndToEnd.java Sat Aug  4 21:26:10 2012
@@ -31,7 +31,8 @@ public class TestDIHEndToEnd extends Abs
   }
   @Test
   public void testEndToEnd() throws Exception {
-    LocalSolrQueryRequest request = lrf.makeRequest("command", "full-import",
+    LocalSolrQueryRequest request = lrf.makeRequest(
+        "command", "full-import", "config", "data-config-end-to-end.xml",
         "clean", "true", "commit", "true", "synchronous", "true", "indent", "true");
     h.query("/dataimport-end-to-end", request);
     assertQ(req("*:*"), "//*[@numFound='20']");

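The solrconfig and test changes above show the new per-request style end to end: the handler is registered without a "config" default, and the file is named on each request instead. A hedged SolrJ sketch of the same call against a live server (the URL and core name are assumptions for illustration):

    import org.apache.solr.client.solrj.SolrServer;
    import org.apache.solr.client.solrj.impl.HttpSolrServer;
    import org.apache.solr.client.solrj.request.QueryRequest;
    import org.apache.solr.common.params.ModifiableSolrParams;

    public class DIHFullImportSketch {
      public static void main(String[] args) throws Exception {
        SolrServer server = new HttpSolrServer("http://localhost:8983/solr/collection1");
        ModifiableSolrParams params = new ModifiableSolrParams();
        params.set("command", "full-import");
        params.set("config", "data-config-end-to-end.xml"); // per request; no solrconfig.xml default
        params.set("clean", "true");
        params.set("commit", "true");
        QueryRequest request = new QueryRequest(params);
        request.setPath("/dataimport-end-to-end");
        server.request(request); // triggers the import
      }
    }
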
Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/SolrLogFormatter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/SolrLogFormatter.java?rev=1369470&r1=1369469&r2=1369470&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/SolrLogFormatter.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/SolrLogFormatter.java Sat Aug  4 21:26:10 2012
@@ -250,10 +250,10 @@ sb.append("(group_name=").append(tg.getN
 
     /*** Isn't core specific... prob better logged from zkController
     if (info != null) {
-      CloudState cloudState = zkController.getCloudState();
-      if (info.cloudState != cloudState) {
+      ClusterState clusterState = zkController.getClusterState();
+      if (info.clusterState != clusterState) {
         // something has changed in the matrix...
-        sb.append(zkController.getBaseUrl() + " sees new CloudState:");
+        sb.append(zkController.getBaseUrl() + " sees new ClusterState:");
       }
     }
     ***/
@@ -263,7 +263,7 @@ sb.append("(group_name=").append(tg.getN
 
   private Map<String,String> getCoreProps(ZkController zkController, SolrCore core) {
     final String collection = core.getCoreDescriptor().getCloudDescriptor().getCollectionName();
-    ZkNodeProps props = zkController.getCloudState().getShardProps(collection,  ZkStateReader.getCoreNodeName(zkController.getNodeName(), core.getName()));
+    ZkNodeProps props = zkController.getClusterState().getShardProps(collection,  ZkStateReader.getCoreNodeName(zkController.getNodeName(), core.getName()));
     if(props!=null) {
       return props.getProperties(); 
     }

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/cloud/AssignShard.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/cloud/AssignShard.java?rev=1369470&r1=1369469&r2=1369470&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/cloud/AssignShard.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/cloud/AssignShard.java Sat Aug  4 21:26:10 2012
@@ -24,7 +24,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.solr.common.cloud.CloudState;
+import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.cloud.Slice;
 
 public class AssignShard {
@@ -36,7 +36,7 @@ public class AssignShard {
    * @param state
    * @return the assigned shard id
    */
-  public static String assignShard(String collection, CloudState state, Integer numShards) {
+  public static String assignShard(String collection, ClusterState state, Integer numShards) {
     if (numShards == null) {
       numShards = 1;
     }

Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java?rev=1369470&r1=1369469&r2=1369470&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java Sat Aug  4 21:26:10 2012
@@ -5,7 +5,7 @@ import java.util.Map;
 
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
-import org.apache.solr.common.cloud.CloudState;
+import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.cloud.ZkCoreNodeProps;
@@ -13,7 +13,6 @@ import org.apache.solr.common.cloud.ZkNo
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.SolrCore;
-import org.apache.solr.handler.component.ShardHandler;
 import org.apache.zookeeper.CreateMode;
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.KeeperException.NodeExistsException;
@@ -195,8 +194,8 @@ final class ShardLeaderElectionContext e
   }
   
   private boolean shouldIBeLeader(ZkNodeProps leaderProps) {
-    CloudState cloudState = zkController.getZkStateReader().getCloudState();
-    Map<String,Slice> slices = cloudState.getSlices(this.collection);
+    ClusterState clusterState = zkController.getZkStateReader().getClusterState();
+    Map<String,Slice> slices = clusterState.getSlices(this.collection);
     Slice slice = slices.get(shardId);
     Map<String,ZkNodeProps> shards = slice.getShards();
     boolean foundSomeoneElseActive = false;
@@ -206,7 +205,7 @@ final class ShardLeaderElectionContext e
       if (new ZkCoreNodeProps(shard.getValue()).getCoreUrl().equals(
               new ZkCoreNodeProps(leaderProps).getCoreUrl())) {
         if (state.equals(ZkStateReader.ACTIVE)
-          && cloudState.liveNodesContain(shard.getValue().get(
+          && clusterState.liveNodesContain(shard.getValue().get(
               ZkStateReader.NODE_NAME_PROP))) {
           // we are alive
           return true;
@@ -214,7 +213,7 @@ final class ShardLeaderElectionContext e
       }
       
       if ((state.equals(ZkStateReader.ACTIVE))
-          && cloudState.liveNodesContain(shard.getValue().get(
+          && clusterState.liveNodesContain(shard.getValue().get(
               ZkStateReader.NODE_NAME_PROP))
           && !new ZkCoreNodeProps(shard.getValue()).getCoreUrl().equals(
               new ZkCoreNodeProps(leaderProps).getCoreUrl())) {
@@ -226,8 +225,8 @@ final class ShardLeaderElectionContext e
   }
   
   private boolean anyoneElseActive() {
-    CloudState cloudState = zkController.getZkStateReader().getCloudState();
-    Map<String,Slice> slices = cloudState.getSlices(this.collection);
+    ClusterState clusterState = zkController.getZkStateReader().getClusterState();
+    Map<String,Slice> slices = clusterState.getSlices(this.collection);
     Slice slice = slices.get(shardId);
     Map<String,ZkNodeProps> shards = slice.getShards();
 
@@ -236,7 +235,7 @@ final class ShardLeaderElectionContext e
 
       
       if ((state.equals(ZkStateReader.ACTIVE))
-          && cloudState.liveNodesContain(shard.getValue().get(
+          && clusterState.liveNodesContain(shard.getValue().get(
               ZkStateReader.NODE_NAME_PROP))) {
         return true;
       }
@@ -250,16 +249,13 @@ final class ShardLeaderElectionContext e
 final class OverseerElectionContext extends ElectionContext {
   
   private final SolrZkClient zkClient;
-  private final ZkStateReader stateReader;
-  private ShardHandler shardHandler;
-  private String adminPath;
-
-  public OverseerElectionContext(ShardHandler shardHandler, String adminPath, final String zkNodeName, ZkStateReader stateReader) {
-    super(zkNodeName, "/overseer_elect", "/overseer_elect/leader", null, stateReader.getZkClient());
-    this.stateReader = stateReader;
-    this.shardHandler = shardHandler;
-    this.adminPath = adminPath;
-    this.zkClient = stateReader.getZkClient();
+  private Overseer overseer;
+
+
+  public OverseerElectionContext(SolrZkClient zkClient, Overseer overseer, final String zkNodeName) {
+    super(zkNodeName, "/overseer_elect", "/overseer_elect/leader", null, zkClient);
+    this.overseer = overseer;
+    this.zkClient = zkClient;
   }
 
   @Override
@@ -281,7 +277,7 @@ final class OverseerElectionContext exte
           CreateMode.EPHEMERAL, true);
     }
   
-    new Overseer(shardHandler, adminPath, stateReader, id);
+    overseer.start(id);
   }
   
 }

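The OverseerElectionContext rewrite above inverts the old flow: rather than constructing a new Overseer inside the election callback, a pre-built Overseer is passed in and started only when this node wins. A hypothetical wiring sketch, assuming LeaderElector exposes joinElection(ElectionContext):

    package org.apache.solr.cloud; // the context classes are package-private

    import org.apache.solr.common.cloud.ZkStateReader;
    import org.apache.solr.handler.component.ShardHandler;

    // Hypothetical wiring: the Overseer is built eagerly but started lazily.
    public class OverseerWiringSketch {
      static void join(LeaderElector elector, ZkStateReader reader,
          ShardHandler shardHandler, String adminPath, String zkNodeName)
          throws Exception {
        Overseer overseer = new Overseer(shardHandler, adminPath, reader);
        ElectionContext context = new OverseerElectionContext(
            reader.getZkClient(), overseer, zkNodeName);
        elector.joinElection(context); // the callback ends with overseer.start(id)
      }
    }
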
Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/cloud/Overseer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/cloud/Overseer.java?rev=1369470&r1=1369469&r2=1369470&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/cloud/Overseer.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/cloud/Overseer.java Sat Aug  4 21:26:10 2012
@@ -24,7 +24,7 @@ import java.util.Map;
 import java.util.Map.Entry;
 
 import org.apache.solr.common.SolrException;
-import org.apache.solr.common.cloud.CloudState;
+import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.cloud.ZkCoreNodeProps;
@@ -47,7 +47,7 @@ public class Overseer {
 
   private static Logger log = LoggerFactory.getLogger(Overseer.class);
   
-  private static class CloudStateUpdater implements Runnable {
+  private class ClusterStateUpdater implements Runnable {
     
     private static final String DELETECORE = "deletecore";
     private final ZkStateReader reader;
@@ -59,7 +59,7 @@ public class Overseer {
     //If Overseer dies while extracting the main queue a new overseer will start from this queue 
     private final DistributedQueue workQueue;
     
-    public CloudStateUpdater(final ZkStateReader reader, final String myId) {
+    public ClusterStateUpdater(final ZkStateReader reader, final String myId) {
       this.zkClient = reader.getZkClient();
       this.stateUpdateQueue = getInQueue(zkClient);
       this.workQueue = getInternalQueue(zkClient);
@@ -70,7 +70,7 @@ public class Overseer {
     @Override
     public void run() {
         
-      if(amILeader()) {
+      if(amILeader() && !Overseer.this.isClosed) {
         // see if there's something left from the previous Overseer and re
         // process all events that were not persisted into cloud state
           synchronized (reader.getUpdateLock()) { //XXX this only protects against edits inside single node
@@ -78,17 +78,17 @@ public class Overseer {
               byte[] head = workQueue.peek();
               
               if (head != null) {
-                reader.updateCloudState(true);
-                CloudState cloudState = reader.getCloudState();
+                reader.updateClusterState(true);
+                ClusterState clusterState = reader.getClusterState();
                 log.info("Replaying operations from work queue.");
                 
                 while (head != null && amILeader()) {
                   final ZkNodeProps message = ZkNodeProps.load(head);
                   final String operation = message
                       .get(QUEUE_OPERATION);
-                  cloudState = processMessage(cloudState, message, operation);
+                  clusterState = processMessage(clusterState, message, operation);
                   zkClient.setData(ZkStateReader.CLUSTER_STATE,
-                      ZkStateReader.toJSON(cloudState), true);
+                      ZkStateReader.toJSON(clusterState), true);
                   workQueue.remove();
                   head = workQueue.peek();
                 }
@@ -110,26 +110,26 @@ public class Overseer {
         }
       
       log.info("Starting to work on the main queue");
-      while (amILeader()) {
+      while (amILeader() && !isClosed) {
         synchronized (reader.getUpdateLock()) {
           try {
             byte[] head = stateUpdateQueue.peek();
             
             if (head != null) {
-              reader.updateCloudState(true);
-              CloudState cloudState = reader.getCloudState();
+              reader.updateClusterState(true);
+              ClusterState clusterState = reader.getClusterState();
               
               while (head != null) {
                 final ZkNodeProps message = ZkNodeProps.load(head);
                 final String operation = message.get(QUEUE_OPERATION);
                 
-                cloudState = processMessage(cloudState, message, operation);
+                clusterState = processMessage(clusterState, message, operation);
                 byte[] processed = stateUpdateQueue.remove();
                 workQueue.offer(processed);
                 head = stateUpdateQueue.peek();
               }
               zkClient.setData(ZkStateReader.CLUSTER_STATE,
-                  ZkStateReader.toJSON(cloudState), true);
+                  ZkStateReader.toJSON(clusterState), true);
             }
             // clean work queue
             while (workQueue.poll() != null);
@@ -157,12 +157,12 @@ public class Overseer {
       }
     }
 
-    private CloudState processMessage(CloudState cloudState,
+    private ClusterState processMessage(ClusterState clusterState,
         final ZkNodeProps message, final String operation) {
       if ("state".equals(operation)) {
-        cloudState = updateState(cloudState, message);
+        clusterState = updateState(clusterState, message);
       } else if (DELETECORE.equals(operation)) {
-        cloudState = removeCore(cloudState, message);
+        clusterState = removeCore(clusterState, message);
       } else if (ZkStateReader.LEADER_PROP.equals(operation)) {
         StringBuilder sb = new StringBuilder();
         String baseUrl = message.get(ZkStateReader.BASE_URL_PROP);
@@ -172,14 +172,14 @@ public class Overseer {
         sb.append(coreName == null ? "" : coreName);
         if (!(sb.substring(sb.length() - 1).equals("/"))) sb
             .append("/");
-        cloudState = setShardLeader(cloudState,
+        clusterState = setShardLeader(clusterState,
             message.get(ZkStateReader.COLLECTION_PROP),
             message.get(ZkStateReader.SHARD_ID_PROP), sb.toString());
       } else {
         throw new RuntimeException("unknown operation:" + operation
             + " contents:" + message.getProperties());
       }
-      return cloudState;
+      return clusterState;
     }
       
       private boolean amILeader() {
@@ -199,7 +199,7 @@ public class Overseer {
       /**
        * Try to assign core to the cluster. 
        */
-      private CloudState updateState(CloudState state, final ZkNodeProps message) {
+      private ClusterState updateState(ClusterState state, final ZkNodeProps message) {
         final String collection = message.get(ZkStateReader.COLLECTION_PROP);
         final String zkCoreNodeName = message.get(ZkStateReader.NODE_NAME_PROP) + "_" + message.get(ZkStateReader.CORE_NAME_PROP);
         final Integer numShards = message.get(ZkStateReader.NUM_SHARDS_PROP)!=null?Integer.parseInt(message.get(ZkStateReader.NUM_SHARDS_PROP)):null;
@@ -214,7 +214,7 @@ public class Overseer {
         String shardId = message.get(ZkStateReader.SHARD_ID_PROP);
         if (shardId == null) {
           String nodeName = message.get(ZkStateReader.NODE_NAME_PROP);
-          //get shardId from CloudState
+          //get shardId from ClusterState
           shardId = getAssignedId(state, nodeName, message);
         }
         if(shardId == null) {
@@ -242,11 +242,11 @@ public class Overseer {
           shardProps.put(zkCoreNodeName, zkProps);
 
           slice = new Slice(shardId, shardProps);
-          CloudState newCloudState = updateSlice(state, collection, slice);
-          return newCloudState;
+          ClusterState newClusterState = updateSlice(state, collection, slice);
+          return newClusterState;
       }
 
-      private CloudState createCollection(CloudState state, String collectionName, int numShards) {
+      private ClusterState createCollection(ClusterState state, String collectionName, int numShards) {
         Map<String, Map<String, Slice>> newStates = new LinkedHashMap<String,Map<String, Slice>>();
         Map<String, Slice> newSlices = new LinkedHashMap<String,Slice>();
         newStates.putAll(state.getCollectionStates());
@@ -255,14 +255,14 @@ public class Overseer {
           newSlices.put(sliceName, new Slice(sliceName, Collections.EMPTY_MAP));
         }
         newStates.put(collectionName, newSlices);
-        CloudState newCloudState = new CloudState(state.getLiveNodes(), newStates);
-        return newCloudState;
+        ClusterState newClusterState = new ClusterState(state.getLiveNodes(), newStates);
+        return newClusterState;
       }
       
       /*
        * Return an already assigned id or null if not assigned
        */
-      private String getAssignedId(final CloudState state, final String nodeName,
+      private String getAssignedId(final ClusterState state, final String nodeName,
           final ZkNodeProps coreState) {
         final String key = coreState.get(ZkStateReader.NODE_NAME_PROP) + "_" +  coreState.get(ZkStateReader.CORE_NAME_PROP);
         Map<String, Slice> slices = state.getSlices(coreState.get(ZkStateReader.COLLECTION_PROP));
@@ -276,7 +276,7 @@ public class Overseer {
         return null;
       }
       
-      private CloudState updateSlice(CloudState state, String collection, Slice slice) {
+      private ClusterState updateSlice(ClusterState state, String collection, Slice slice) {
         
         final Map<String, Map<String, Slice>> newStates = new LinkedHashMap<String,Map<String,Slice>>();
         newStates.putAll(state.getCollectionStates());
@@ -306,10 +306,10 @@ public class Overseer {
           final Slice updatedSlice = new Slice(slice.getName(), shards);
           slices.put(slice.getName(), updatedSlice);
         }
-        return new CloudState(state.getLiveNodes(), newStates);
+        return new ClusterState(state.getLiveNodes(), newStates);
       }
       
-      private CloudState setShardLeader(CloudState state, String collection, String sliceName, String leaderUrl) {
+      private ClusterState setShardLeader(ClusterState state, String collection, String sliceName, String leaderUrl) {
         
         final Map<String, Map<String, Slice>> newStates = new LinkedHashMap<String,Map<String,Slice>>();
         newStates.putAll(state.getCollectionStates());
@@ -341,21 +341,21 @@ public class Overseer {
           Slice slice = new Slice(sliceName, newShards);
           slices.put(sliceName, slice);
         }
-        return new CloudState(state.getLiveNodes(), newStates);
+        return new ClusterState(state.getLiveNodes(), newStates);
       }
       
       /*
        * Remove core from cloudstate
        */
-      private CloudState removeCore(final CloudState cloudState, ZkNodeProps message) {
+      private ClusterState removeCore(final ClusterState clusterState, ZkNodeProps message) {
         
         final String coreNodeName = message.get(ZkStateReader.NODE_NAME_PROP) + "_" + message.get(ZkStateReader.CORE_NAME_PROP);
         final String collection = message.get(ZkStateReader.COLLECTION_PROP);
 
         final LinkedHashMap<String, Map<String, Slice>> newStates = new LinkedHashMap<String,Map<String,Slice>>();
-        for(String collectionName: cloudState.getCollections()) {
+        for(String collectionName: clusterState.getCollections()) {
           if(collection.equals(collectionName)) {
-            Map<String, Slice> slices = cloudState.getSlices(collection);
+            Map<String, Slice> slices = clusterState.getSlices(collection);
             LinkedHashMap<String, Slice> newSlices = new LinkedHashMap<String, Slice>();
             for(Slice slice: slices.values()) {
               if(slice.getShards().containsKey(coreNodeName)) {
@@ -393,29 +393,53 @@ public class Overseer {
               }
             }
           } else {
-            newStates.put(collectionName, cloudState.getSlices(collectionName));
+            newStates.put(collectionName, clusterState.getSlices(collectionName));
           }
         }
-        CloudState newState = new CloudState(cloudState.getLiveNodes(), newStates);
+        ClusterState newState = new ClusterState(clusterState.getLiveNodes(), newStates);
         return newState;
      }
     
   }
+
+  private Thread ccThread;
+
+  private Thread updaterThread;
+
+  private volatile boolean isClosed;
+
+  private ZkStateReader reader;
+
+  private ShardHandler shardHandler;
+
+  private String adminPath;
+  
+  public Overseer(ShardHandler shardHandler, String adminPath, final ZkStateReader reader) throws KeeperException, InterruptedException {
+    this.reader = reader;
+    this.shardHandler = shardHandler;
+    this.adminPath = adminPath;
+  }
   
-  public Overseer(ShardHandler shardHandler, String adminPath, final ZkStateReader reader, final String id) throws KeeperException, InterruptedException {
+  public void start(String id) {
     log.info("Overseer (id=" + id + ") starting");
     createOverseerNode(reader.getZkClient());
     //launch cluster state updater thread
     ThreadGroup tg = new ThreadGroup("Overseer state updater.");
-    Thread updaterThread = new Thread(tg, new CloudStateUpdater(reader, id));
+    updaterThread = new Thread(tg, new ClusterStateUpdater(reader, id));
     updaterThread.setDaemon(true);
-    updaterThread.start();
-    
+
     ThreadGroup ccTg = new ThreadGroup("Overseer collection creation process.");
-    Thread ccThread = new Thread(ccTg, new OverseerCollectionProcessor(reader, id, shardHandler, adminPath));
+    ccThread = new Thread(ccTg, new OverseerCollectionProcessor(reader, id, shardHandler, adminPath), 
+        "Overseer-" + id);
     ccThread.setDaemon(true);
+    
+    updaterThread.start();
     ccThread.start();
   }
+  
+  public void close() {
+    isClosed = true;
+  }
 
   /**
    * Get queue that can be used to send messages to Overseer.

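Taken together, the Overseer hunks above add a start/close lifecycle: start(id) keeps references to the two daemon threads, and close() sets a volatile isClosed flag that both run() loops now check, so a superseded Overseer winds down without interruption. A stripped-down sketch of the pattern (illustrative names only):

    // Sketch: cooperative shutdown via a volatile flag, as in the diff above.
    public class OverseerLifecycleSketch {
      private volatile boolean isClosed;
      private Thread updaterThread;

      public void start(String id) {
        updaterThread = new Thread(new Runnable() {
          public void run() {
            while (!isClosed) {
              // peek at the state-update queue, apply messages, publish state ...
              try {
                Thread.sleep(10);
              } catch (InterruptedException e) {
                return;
              }
            }
          }
        }, "Overseer-" + id);
        updaterThread.setDaemon(true);
        updaterThread.start();
      }

      public void close() {
        isClosed = true; // the loop observes this on its next iteration
      }

      public static void main(String[] args) throws InterruptedException {
        OverseerLifecycleSketch overseer = new OverseerLifecycleSketch();
        overseer.start("node1");
        Thread.sleep(50);
        overseer.close();
      }
    }
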
Modified: lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java?rev=1369470&r1=1369469&r2=1369470&view=diff
==============================================================================
--- lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java (original)
+++ lucene/dev/branches/pforcodec_3892/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java Sat Aug  4 21:26:10 2012
@@ -25,7 +25,7 @@ import java.util.Set;
 
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
-import org.apache.solr.common.cloud.CloudState;
+import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.cloud.ZkNodeProps;
 import org.apache.solr.common.cloud.ZkStateReader;
@@ -64,6 +64,8 @@ public class OverseerCollectionProcessor
   private String adminPath;
 
   private ZkStateReader zkStateReader;
+
+  private boolean isClosed;
   
   public OverseerCollectionProcessor(ZkStateReader zkStateReader, String myId, ShardHandler shardHandler, String adminPath) {
     this.zkStateReader = zkStateReader;
@@ -76,7 +78,7 @@ public class OverseerCollectionProcessor
   @Override
   public void run() {
     log.info("Process current queue of collection creations");
-    while (amILeader()) {
+    while (amILeader() && !isClosed) {
       try {
         byte[] head = workQueue.peek(true);
         
@@ -108,6 +110,10 @@ public class OverseerCollectionProcessor
     }
   }
   
+  public void close() {
+    isClosed = true;
+  }
+  
   private boolean amILeader() {
     try {
       ZkNodeProps props = ZkNodeProps.load(zkStateReader.getZkClient().getData(
@@ -126,22 +132,22 @@ public class OverseerCollectionProcessor
   
   private boolean processMessage(ZkNodeProps message, String operation) {
     if (CREATECOLLECTION.equals(operation)) {
-      return createCollection(zkStateReader.getCloudState(), message);
+      return createCollection(zkStateReader.getClusterState(), message);
     } else if (DELETECOLLECTION.equals(operation)) {
       ModifiableSolrParams params = new ModifiableSolrParams();
       params.set(CoreAdminParams.ACTION, CoreAdminAction.UNLOAD.toString());
       params.set(CoreAdminParams.DELETE_INSTANCE_DIR, true);
-      return collectionCmd(zkStateReader.getCloudState(), message, params);
+      return collectionCmd(zkStateReader.getClusterState(), message, params);
     } else if (RELOADCOLLECTION.equals(operation)) {
       ModifiableSolrParams params = new ModifiableSolrParams();
       params.set(CoreAdminParams.ACTION, CoreAdminAction.RELOAD.toString());
-      return collectionCmd(zkStateReader.getCloudState(), message, params);
+      return collectionCmd(zkStateReader.getClusterState(), message, params);
     }
     // unknown command, toss it from our queue
     return true;
   }
 
-  private boolean createCollection(CloudState cloudState, ZkNodeProps message) {
+  private boolean createCollection(ClusterState clusterState, ZkNodeProps message) {
     
     // look at the replication factor and see if it matches reality
     // if it does not, find best nodes to create more cores
@@ -176,7 +182,7 @@ public class OverseerCollectionProcessor
     
     // TODO: add smarter options that look at the current number of cores per node?
     // for now we just go random
-    Set<String> nodes = cloudState.getLiveNodes();
+    Set<String> nodes = clusterState.getLiveNodes();
     List<String> nodeList = new ArrayList<String>(nodes.size());
     nodeList.addAll(nodes);
     Collections.shuffle(nodeList);
@@ -229,11 +235,11 @@ public class OverseerCollectionProcessor
     return true;
   }
   
-  private boolean collectionCmd(CloudState cloudState, ZkNodeProps message, ModifiableSolrParams params) {
+  private boolean collectionCmd(ClusterState clusterState, ZkNodeProps message, ModifiableSolrParams params) {
     log.info("Executing Collection Cmd : " + params);
     String name = message.get("name");
     
-    Map<String,Slice> slices = cloudState.getCollectionStates().get(name);
+    Map<String,Slice> slices = clusterState.getCollectionStates().get(name);
     
     if (slices == null) {
       throw new SolrException(ErrorCode.BAD_REQUEST, "Could not find collection:" + name);
@@ -245,7 +251,7 @@ public class OverseerCollectionProcessor
       Set<Map.Entry<String,ZkNodeProps>> shardEntries = shards.entrySet();
       for (Map.Entry<String,ZkNodeProps> shardEntry : shardEntries) {
         final ZkNodeProps node = shardEntry.getValue();
-        if (cloudState.liveNodesContain(node.get(ZkStateReader.NODE_NAME_PROP))) {
+        if (clusterState.liveNodesContain(node.get(ZkStateReader.NODE_NAME_PROP))) {
           params.set(CoreAdminParams.CORE, node.get(ZkStateReader.CORE_NAME_PROP));
 
           String replica = node.get(ZkStateReader.BASE_URL_PROP);