Posted to commits@lucene.apache.org by er...@apache.org on 2012/09/17 18:02:02 UTC

svn commit: r1386681 [7/8] - in /lucene/dev/trunk: lucene/analysis/common/src/java/org/apache/lucene/analysis/br/ lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/ lucene/analysis/common/src/java/org/apache/lucene/analysis/compound...

Modified: lucene/dev/trunk/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java (original)
+++ lucene/dev/trunk/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java Mon Sep 17 16:01:56 2012
@@ -89,17 +89,17 @@ public class DocBuilder {
     
     String writerClassStr = null;
     if(reqParams!=null && reqParams.getRawParams() != null) {
-    	writerClassStr = (String) reqParams.getRawParams().get(PARAM_WRITER_IMPL);
+      writerClassStr = (String) reqParams.getRawParams().get(PARAM_WRITER_IMPL);
     }
     if(writerClassStr != null && !writerClassStr.equals(DEFAULT_WRITER_NAME) && !writerClassStr.equals(DocBuilder.class.getPackage().getName() + "." + DEFAULT_WRITER_NAME)) {
-    	try {
-    		Class<DIHWriter> writerClass = loadClass(writerClassStr, dataImporter.getCore());
-    		this.writer = writerClass.newInstance();
-    	} catch (Exception e) {
-    		throw new DataImportHandlerException(DataImportHandlerException.SEVERE, "Unable to load Writer implementation:" + writerClassStr, e);
-    	}
-   	} else {
-    	writer = solrWriter;
+      try {
+        Class<DIHWriter> writerClass = loadClass(writerClassStr, dataImporter.getCore());
+        this.writer = writerClass.newInstance();
+      } catch (Exception e) {
+        throw new DataImportHandlerException(DataImportHandlerException.SEVERE, "Unable to load Writer implementation:" + writerClassStr, e);
+      }
+     } else {
+      writer = solrWriter;
     }
     ContextImpl ctx = new ContextImpl(null, null, null, null, reqParams.getRawParams(), null, this);
     writer.init(ctx);
@@ -178,111 +178,111 @@ public class DocBuilder {
   @SuppressWarnings("unchecked")
   public void execute() {
     List<EntityProcessorWrapper> epwList = null;
-  	try {
-	    dataImporter.store(DataImporter.STATUS_MSGS, statusMessages);
-	    config = dataImporter.getConfig();
-	    final AtomicLong startTime = new AtomicLong(System.currentTimeMillis());
-	    statusMessages.put(TIME_ELAPSED, new Object() {
-	      @Override
-	      public String toString() {
-	        return getTimeElapsedSince(startTime.get());
-	      }
-	    });
-	
-	    statusMessages.put(DataImporter.MSG.TOTAL_QUERIES_EXECUTED,
-	            importStatistics.queryCount);
-	    statusMessages.put(DataImporter.MSG.TOTAL_ROWS_EXECUTED,
-	            importStatistics.rowsCount);
-	    statusMessages.put(DataImporter.MSG.TOTAL_DOC_PROCESSED,
-	            importStatistics.docCount);
-	    statusMessages.put(DataImporter.MSG.TOTAL_DOCS_SKIPPED,
-	            importStatistics.skipDocCount);
-	
-	    List<String> entities = reqParams.getEntitiesToRun();
-	
-	    // Trigger onImportStart
-	    if (config.getOnImportStart() != null) {
-	      invokeEventListener(config.getOnImportStart());
-	    }
-	    AtomicBoolean fullCleanDone = new AtomicBoolean(false);
-	    //we must not do a delete of *:* multiple times if there are multiple root entities to be run
-	    Properties lastIndexTimeProps = new Properties();
-	    lastIndexTimeProps.setProperty(LAST_INDEX_KEY,
-	            DataImporter.DATE_TIME_FORMAT.get().format(dataImporter.getIndexStartTime()));
-	    
-	    epwList = new ArrayList<EntityProcessorWrapper>(config.getEntities().size());
-	    for (Entity e : config.getEntities()) {
-	      epwList.add(getEntityProcessorWrapper(e));
-	    }	    
-	    for (EntityProcessorWrapper epw : epwList) {
-	      if (entities != null && !entities.contains(epw.getEntity().getName()))
-	        continue;
-	      lastIndexTimeProps.setProperty(epw.getEntity().getName() + "." + LAST_INDEX_KEY,
-	              DataImporter.DATE_TIME_FORMAT.get().format(new Date()));
-	      currentEntityProcessorWrapper = epw;
-	      String delQuery = epw.getEntity().getAllAttributes().get("preImportDeleteQuery");
-	      if (dataImporter.getStatus() == DataImporter.Status.RUNNING_DELTA_DUMP) {
-	        cleanByQuery(delQuery, fullCleanDone);
-	        doDelta();
-	        delQuery = epw.getEntity().getAllAttributes().get("postImportDeleteQuery");
-	        if (delQuery != null) {
-	          fullCleanDone.set(false);
-	          cleanByQuery(delQuery, fullCleanDone);
-	        }
-	      } else {
-	        cleanByQuery(delQuery, fullCleanDone);
-	        doFullDump();
-	        delQuery = epw.getEntity().getAllAttributes().get("postImportDeleteQuery");
-	        if (delQuery != null) {
-	          fullCleanDone.set(false);
-	          cleanByQuery(delQuery, fullCleanDone);
-	        }
-	      }
-	      statusMessages.remove(DataImporter.MSG.TOTAL_DOC_PROCESSED);
-	    }
-	
-	    if (stop.get()) {
-	      // Dont commit if aborted using command=abort
-	      statusMessages.put("Aborted", DataImporter.DATE_TIME_FORMAT.get().format(new Date()));
-	      rollback();
-	    } else {
-	      // Do not commit unnecessarily if this is a delta-import and no documents were created or deleted
-	      if (!reqParams.isClean()) {
-	        if (importStatistics.docCount.get() > 0 || importStatistics.deletedDocCount.get() > 0) {
-	          finish(lastIndexTimeProps);
-	        }
-	      } else {
-	        // Finished operation normally, commit now
-	        finish(lastIndexTimeProps);
-	      } 
-	      
-	      if (config.getOnImportEnd() != null) {
-	        invokeEventListener(config.getOnImportEnd());
-	      }
-	    }
-	
-	    statusMessages.remove(TIME_ELAPSED);
-	    statusMessages.put(DataImporter.MSG.TOTAL_DOC_PROCESSED, ""+ importStatistics.docCount.get());
-	    if(importStatistics.failedDocCount.get() > 0)
-	      statusMessages.put(DataImporter.MSG.TOTAL_FAILED_DOCS, ""+ importStatistics.failedDocCount.get());
-	
-	    statusMessages.put("Time taken", getTimeElapsedSince(startTime.get()));
-	    LOG.info("Time taken = " + getTimeElapsedSince(startTime.get()));
-	  } catch(Exception e)
-		{
-			throw new RuntimeException(e);
-		} finally
-		{
-			if (writer != null) {
-	      writer.close();
-	    }
-			if (epwList != null) {
-			  closeEntityProcessorWrappers(epwList);
-			}
-			if(reqParams.isDebug()) {
-				reqParams.getDebugInfo().debugVerboseOutput = getDebugLogger().output;	
-			}
-		}
+    try {
+      dataImporter.store(DataImporter.STATUS_MSGS, statusMessages);
+      config = dataImporter.getConfig();
+      final AtomicLong startTime = new AtomicLong(System.currentTimeMillis());
+      statusMessages.put(TIME_ELAPSED, new Object() {
+        @Override
+        public String toString() {
+          return getTimeElapsedSince(startTime.get());
+        }
+      });
+
+      statusMessages.put(DataImporter.MSG.TOTAL_QUERIES_EXECUTED,
+              importStatistics.queryCount);
+      statusMessages.put(DataImporter.MSG.TOTAL_ROWS_EXECUTED,
+              importStatistics.rowsCount);
+      statusMessages.put(DataImporter.MSG.TOTAL_DOC_PROCESSED,
+              importStatistics.docCount);
+      statusMessages.put(DataImporter.MSG.TOTAL_DOCS_SKIPPED,
+              importStatistics.skipDocCount);
+
+      List<String> entities = reqParams.getEntitiesToRun();
+
+      // Trigger onImportStart
+      if (config.getOnImportStart() != null) {
+        invokeEventListener(config.getOnImportStart());
+      }
+      AtomicBoolean fullCleanDone = new AtomicBoolean(false);
+      //we must not do a delete of *:* multiple times if there are multiple root entities to be run
+      Properties lastIndexTimeProps = new Properties();
+      lastIndexTimeProps.setProperty(LAST_INDEX_KEY,
+              DataImporter.DATE_TIME_FORMAT.get().format(dataImporter.getIndexStartTime()));
+
+      epwList = new ArrayList<EntityProcessorWrapper>(config.getEntities().size());
+      for (Entity e : config.getEntities()) {
+        epwList.add(getEntityProcessorWrapper(e));
+      }
+      for (EntityProcessorWrapper epw : epwList) {
+        if (entities != null && !entities.contains(epw.getEntity().getName()))
+          continue;
+        lastIndexTimeProps.setProperty(epw.getEntity().getName() + "." + LAST_INDEX_KEY,
+                DataImporter.DATE_TIME_FORMAT.get().format(new Date()));
+        currentEntityProcessorWrapper = epw;
+        String delQuery = epw.getEntity().getAllAttributes().get("preImportDeleteQuery");
+        if (dataImporter.getStatus() == DataImporter.Status.RUNNING_DELTA_DUMP) {
+          cleanByQuery(delQuery, fullCleanDone);
+          doDelta();
+          delQuery = epw.getEntity().getAllAttributes().get("postImportDeleteQuery");
+          if (delQuery != null) {
+            fullCleanDone.set(false);
+            cleanByQuery(delQuery, fullCleanDone);
+          }
+        } else {
+          cleanByQuery(delQuery, fullCleanDone);
+          doFullDump();
+          delQuery = epw.getEntity().getAllAttributes().get("postImportDeleteQuery");
+          if (delQuery != null) {
+            fullCleanDone.set(false);
+            cleanByQuery(delQuery, fullCleanDone);
+          }
+        }
+        statusMessages.remove(DataImporter.MSG.TOTAL_DOC_PROCESSED);
+      }
+
+      if (stop.get()) {
+        // Dont commit if aborted using command=abort
+        statusMessages.put("Aborted", DataImporter.DATE_TIME_FORMAT.get().format(new Date()));
+        rollback();
+      } else {
+        // Do not commit unnecessarily if this is a delta-import and no documents were created or deleted
+        if (!reqParams.isClean()) {
+          if (importStatistics.docCount.get() > 0 || importStatistics.deletedDocCount.get() > 0) {
+            finish(lastIndexTimeProps);
+          }
+        } else {
+          // Finished operation normally, commit now
+          finish(lastIndexTimeProps);
+        }
+
+        if (config.getOnImportEnd() != null) {
+          invokeEventListener(config.getOnImportEnd());
+        }
+      }
+
+      statusMessages.remove(TIME_ELAPSED);
+      statusMessages.put(DataImporter.MSG.TOTAL_DOC_PROCESSED, ""+ importStatistics.docCount.get());
+      if(importStatistics.failedDocCount.get() > 0)
+        statusMessages.put(DataImporter.MSG.TOTAL_FAILED_DOCS, ""+ importStatistics.failedDocCount.get());
+
+      statusMessages.put("Time taken", getTimeElapsedSince(startTime.get()));
+      LOG.info("Time taken = " + getTimeElapsedSince(startTime.get()));
+    } catch(Exception e)
+    {
+      throw new RuntimeException(e);
+    } finally
+    {
+      if (writer != null) {
+        writer.close();
+      }
+      if (epwList != null) {
+        closeEntityProcessorWrappers(epwList);
+      }
+      if(reqParams.isDebug()) {
+        reqParams.getDebugInfo().debugVerboseOutput = getDebugLogger().output;
+      }
+    }
   }
   private void closeEntityProcessorWrappers(List<EntityProcessorWrapper> epwList) {
     for(EntityProcessorWrapper epw : epwList) {
@@ -506,7 +506,7 @@ public class DocBuilder {
             if (!doc.isEmpty()) {
               boolean result = writer.upload(doc);
               if(reqParams.isDebug()) {
-              	reqParams.getDebugInfo().debugDocuments.add(doc);
+                reqParams.getDebugInfo().debugDocuments.add(doc);
               }
               doc = null;
               if (result){

Modified: lucene/dev/trunk/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/EntityProcessorBase.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/EntityProcessorBase.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/EntityProcessorBase.java (original)
+++ lucene/dev/trunk/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/EntityProcessorBase.java Mon Sep 17 16:01:56 2012
@@ -133,11 +133,11 @@ public class EntityProcessorBase extends
 
   @Override
   public void destroy() {
-  	query = null;
-  	if(cacheSupport!=null){
-  	  cacheSupport.destroyAll();
-  	}
-  	cacheSupport = null;
+    query = null;
+    if(cacheSupport!=null){
+      cacheSupport.destroyAll();
+    }
+    cacheSupport = null;
   }
 
   

Modified: lucene/dev/trunk/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SimplePropertiesWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SimplePropertiesWriter.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SimplePropertiesWriter.java (original)
+++ lucene/dev/trunk/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SimplePropertiesWriter.java Mon Sep 17 16:01:56 2012
@@ -30,15 +30,15 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class SimplePropertiesWriter implements DIHPropertiesWriter {
-	private static final Logger log = LoggerFactory.getLogger(SimplePropertiesWriter.class);
+  private static final Logger log = LoggerFactory.getLogger(SimplePropertiesWriter.class);
 
-	static final String IMPORTER_PROPERTIES = "dataimport.properties";
+  static final String IMPORTER_PROPERTIES = "dataimport.properties";
 
-	static final String LAST_INDEX_KEY = "last_index_time";
+  static final String LAST_INDEX_KEY = "last_index_time";
 
-	private String persistFilename = IMPORTER_PROPERTIES;
+  private String persistFilename = IMPORTER_PROPERTIES;
 
-	private String configDir = null;
+  private String configDir = null;
 
 
 
@@ -48,15 +48,15 @@ public class SimplePropertiesWriter impl
       String persistFileName = dataImporter.getHandlerName();
 
       this.configDir = configDir;
-	  if(persistFileName != null){
+    if(persistFileName != null){
         persistFilename = persistFileName + ".properties";
       }
     }
 
 
 
-	
-	private File getPersistFile() {
+
+  private File getPersistFile() {
     String filePath = configDir;
     if (configDir != null && !configDir.endsWith(File.separator))
       filePath += File.separator;
@@ -71,53 +71,53 @@ public class SimplePropertiesWriter impl
     }
 
     @Override
-	public void persist(Properties p) {
-		OutputStream propOutput = null;
+  public void persist(Properties p) {
+    OutputStream propOutput = null;
+
+    Properties props = readIndexerProperties();
 
-		Properties props = readIndexerProperties();
+    try {
+      props.putAll(p);
+      String filePath = configDir;
+      if (configDir != null && !configDir.endsWith(File.separator))
+        filePath += File.separator;
+      filePath += persistFilename;
+      propOutput = new FileOutputStream(filePath);
+      props.store(propOutput, null);
+      log.info("Wrote last indexed time to " + persistFilename);
+    } catch (Exception e) {
+      throw new DataImportHandlerException(DataImportHandlerException.SEVERE, "Unable to persist Index Start Time", e);
+    } finally {
+      try {
+        if (propOutput != null)
+          propOutput.close();
+      } catch (IOException e) {
+        propOutput = null;
+      }
+    }
+  }
 
-		try {
-			props.putAll(p);
-			String filePath = configDir;
-			if (configDir != null && !configDir.endsWith(File.separator))
-				filePath += File.separator;
-			filePath += persistFilename;
-			propOutput = new FileOutputStream(filePath);
-			props.store(propOutput, null);
-			log.info("Wrote last indexed time to " + persistFilename);
-		} catch (Exception e) {
-			throw new DataImportHandlerException(DataImportHandlerException.SEVERE, "Unable to persist Index Start Time", e);
-		} finally {
-			try {
-				if (propOutput != null)
-					propOutput.close();
-			} catch (IOException e) {
-				propOutput = null;
-			}
-		}
-	}
-
-	@Override
-	public Properties readIndexerProperties() {
-		Properties props = new Properties();
-		InputStream propInput = null;
-
-		try {
-			propInput = new FileInputStream(configDir + persistFilename);
-			props.load(propInput);
-			log.info("Read " + persistFilename);
-		} catch (Exception e) {
-			log.warn("Unable to read: " + persistFilename);
-		} finally {
-			try {
-				if (propInput != null)
-					propInput.close();
-			} catch (IOException e) {
-				propInput = null;
-			}
-		}
+  @Override
+  public Properties readIndexerProperties() {
+    Properties props = new Properties();
+    InputStream propInput = null;
+
+    try {
+      propInput = new FileInputStream(configDir + persistFilename);
+      props.load(propInput);
+      log.info("Read " + persistFilename);
+    } catch (Exception e) {
+      log.warn("Unable to read: " + persistFilename);
+    } finally {
+      try {
+        if (propInput != null)
+          propInput.close();
+      } catch (IOException e) {
+        propInput = null;
+      }
+    }
 
-		return props;
-	}
+    return props;
+  }
 
 }

Modified: lucene/dev/trunk/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrWriter.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrWriter.java (original)
+++ lucene/dev/trunk/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrWriter.java Mon Sep 17 16:01:56 2012
@@ -54,12 +54,12 @@ public class SolrWriter extends DIHWrite
   
   @Override
   public void close() {
-  	try {
-  		processor.finish();
-  	} catch (IOException e) {
-  		throw new DataImportHandlerException(DataImportHandlerException.SEVERE,
-  				"Unable to call finish() on UpdateRequestProcessor", e);
-  	}
+    try {
+      processor.finish();
+    } catch (IOException e) {
+      throw new DataImportHandlerException(DataImportHandlerException.SEVERE,
+          "Unable to call finish() on UpdateRequestProcessor", e);
+    }
   }
   @Override
   public boolean upload(SolrInputDocument d) {
@@ -87,8 +87,8 @@ public class SolrWriter extends DIHWrite
       log.error("Exception while deleteing: " + id, e);
     }
   }
-  	
-	@Override
+
+  @Override
   public void deleteByQuery(String query) {
     try {
       log.info("Deleting documents from Solr with query: " + query);
@@ -100,7 +100,7 @@ public class SolrWriter extends DIHWrite
     }
   }
 
-	@Override
+  @Override
   public void commit(boolean optimize) {
     try {
       CommitUpdateCommand commit = new CommitUpdateCommand(req,optimize);
@@ -110,7 +110,7 @@ public class SolrWriter extends DIHWrite
     }
   }
 
-	@Override
+  @Override
   public void rollback() {
     try {
       RollbackUpdateCommand rollback = new RollbackUpdateCommand(req);
@@ -120,7 +120,7 @@ public class SolrWriter extends DIHWrite
     }
   }
 
-	@Override
+  @Override
   public void doDeleteAll() {
     try {
       DeleteUpdateCommand deleteCommand = new DeleteUpdateCommand(req);
@@ -158,8 +158,8 @@ public class SolrWriter extends DIHWrite
       return null;
     }
   }
-	@Override
-	public void init(Context context) {
-		/* NO-OP */		
-	}	
+  @Override
+  public void init(Context context) {
+    /* NO-OP */
+  }
 }

Modified: lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDIHCacheTestCase.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDIHCacheTestCase.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDIHCacheTestCase.java (original)
+++ lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDIHCacheTestCase.java Mon Sep 17 16:01:56 2012
@@ -38,197 +38,197 @@ import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 
-public class AbstractDIHCacheTestCase {	
-	protected static final Date Feb21_2011 = new Date(1298268000000l);
-	protected final String[] fieldTypes = { "INTEGER", "BIGDECIMAL", "STRING", "STRING",   "FLOAT",   "DATE",   "CLOB" };
-	protected final String[] fieldNames = { "a_id",    "PI",         "letter", "examples", "a_float", "a_date", "DESCRIPTION" };
-	protected List<ControlData> data = new ArrayList<ControlData>();
-	protected Clob APPLE = null;
-	
-	@Before
-	public void setup() {
-		try {
-			APPLE = new SerialClob(new String("Apples grow on trees and they are good to eat.").toCharArray());
-		} catch (SQLException sqe) {
-			Assert.fail("Could not Set up Test");
-		}
-
-		// The first row needs to have all non-null fields,
-		// otherwise we would have to always send the fieldTypes & fieldNames as CacheProperties when building.
-		data = new ArrayList<ControlData>();
-		data.add(new ControlData(new Object[] { new Integer(1), new BigDecimal(Math.PI), "A", "Apple", new Float(1.11), Feb21_2011, APPLE }));
-		data.add(new ControlData(new Object[] { new Integer(2), new BigDecimal(Math.PI), "B", "Ball", new Float(2.22), Feb21_2011, null }));
-		data.add(new ControlData(new Object[] { new Integer(4), new BigDecimal(Math.PI), "D", "Dog", new Float(4.44), Feb21_2011, null }));
-		data.add(new ControlData(new Object[] { new Integer(3), new BigDecimal(Math.PI), "C", "Cookie", new Float(3.33), Feb21_2011, null }));
-		data.add(new ControlData(new Object[] { new Integer(4), new BigDecimal(Math.PI), "D", "Daisy", new Float(4.44), Feb21_2011, null }));
-		data.add(new ControlData(new Object[] { new Integer(4), new BigDecimal(Math.PI), "D", "Drawing", new Float(4.44), Feb21_2011, null }));
-		data.add(new ControlData(new Object[] { new Integer(5), new BigDecimal(Math.PI), "E",
-				Arrays.asList("Eggplant", "Ear", "Elephant", "Engine"), new Float(5.55), Feb21_2011, null }));
-	}
-
-	@After
-	public void teardown() {
-		APPLE = null;
-		data = null;
-	}
-	
-	//A limitation of this test class is that the primary key needs to be the first one in the list.
-	//DIHCaches, however, can handle any field being the primary key.
-	class ControlData implements Comparable<ControlData>, Iterable<Object> {
-		Object[] data;
-
-		ControlData(Object[] data) {
-			this.data = data;
-		}
-
-		@SuppressWarnings("unchecked")
-		public int compareTo(ControlData cd) {
-			Comparable c1 = (Comparable) data[0];
-			Comparable c2 = (Comparable) cd.data[0];
-			return c1.compareTo(c2);
-		}
-
-		public Iterator<Object> iterator() {
-			return Arrays.asList(data).iterator();
-		}
-	}
-	
-	protected void loadData(DIHCache cache, List<ControlData> theData, String[] theFieldNames, boolean keepOrdered) {
-		for (ControlData cd : theData) {
-			cache.add(controlDataToMap(cd, theFieldNames, keepOrdered));
-		}
-	}
-
-	protected List<ControlData> extractDataInKeyOrder(DIHCache cache, String[] theFieldNames) {
-		List<Object[]> data = new ArrayList<Object[]>();
-		Iterator<Map<String, Object>> cacheIter = cache.iterator();
-		while (cacheIter.hasNext()) {
-			data.add(mapToObjectArray(cacheIter.next(), theFieldNames));
-		}
-		return listToControlData(data);
-	}
-
-	//This method assumes that the Primary Keys are integers and that the first id=1.  
-	//It will look for id's sequentially until one is skipped, then will stop.
-	protected List<ControlData> extractDataByKeyLookup(DIHCache cache, String[] theFieldNames) {
-		int recId = 1;
-		List<Object[]> data = new ArrayList<Object[]>();
-		while (true) {
-			Iterator<Map<String, Object>> listORecs = cache.iterator(recId);
-			if (listORecs == null) {
-				break;
-			}
-
-			while(listORecs.hasNext()) {
-				data.add(mapToObjectArray(listORecs.next(), theFieldNames));
-			}
-			recId++;
-		}
-		return listToControlData(data);
-	}
-
-	protected List<ControlData> listToControlData(List<Object[]> data) {
-		List<ControlData> returnData = new ArrayList<ControlData>(data.size());
-		for (int i = 0; i < data.size(); i++) {
-			returnData.add(new ControlData(data.get(i)));
-		}
-		return returnData;
-	}
-
-	protected Object[] mapToObjectArray(Map<String, Object> rec, String[] theFieldNames) {
-		Object[] oos = new Object[theFieldNames.length];
-		for (int i = 0; i < theFieldNames.length; i++) {
-			oos[i] = rec.get(theFieldNames[i]);
-		}
-		return oos;
-	}
-
-	protected void compareData(List<ControlData> theControl, List<ControlData> test) {
-		// The test data should come back primarily in Key order and secondarily in insertion order.
-		List<ControlData> control = new ArrayList<ControlData>(theControl);
-		Collections.sort(control);
-
-		StringBuilder errors = new StringBuilder();
-		if (test.size() != control.size()) {
-			errors.append("-Returned data has " + test.size() + " records.  expected: " + control.size() + "\n");
-		}
-		for (int i = 0; i < control.size() && i < test.size(); i++) {
-			Object[] controlRec = control.get(i).data;
-			Object[] testRec = test.get(i).data;
-			if (testRec.length != controlRec.length) {
-				errors.append("-Record indexAt=" + i + " has " + testRec.length + " data elements.  extpected: " + controlRec.length + "\n");
-			}
-			for (int j = 0; j < controlRec.length && j < testRec.length; j++) {
-				Object controlObj = controlRec[j];
-				Object testObj = testRec[j];
-				if (controlObj == null && testObj != null) {
-					errors.append("-Record indexAt=" + i + ", Data Element indexAt=" + j + " is not NULL as expected.\n");
-				} else if (controlObj != null && testObj == null) {
-					errors.append("-Record indexAt=" + i + ", Data Element indexAt=" + j + " is NULL.  Expected: " + controlObj + " (class="
-							+ controlObj.getClass().getName() + ")\n");
-				} else if (controlObj != null && testObj != null && controlObj instanceof Clob) {
-					String controlString = clobToString((Clob) controlObj);
-					String testString = clobToString((Clob) testObj);
-					if (!controlString.equals(testString)) {
-						errors.append("-Record indexAt=" + i + ", Data Element indexAt=" + j + " has: " + testString + " (class=Clob) ... Expected: " + controlString
-								+ " (class=Clob)\n");
-					}
-				} else if (controlObj != null && !controlObj.equals(testObj)) {
-					errors.append("-Record indexAt=" + i + ", Data Element indexAt=" + j + " has: " + testObj + " (class=" + testObj.getClass().getName()
-							+ ") ... Expected: " + controlObj + " (class=" + controlObj.getClass().getName() + ")\n");
-				}
-			}
-		}
-		if (errors.length() > 0) {
-			Assert.fail(errors.toString());
-		}
-	}
-
-	protected Map<String, Object> controlDataToMap(ControlData cd, String[] theFieldNames, boolean keepOrdered) {
-		Map<String, Object> rec = null;
-		if (keepOrdered) {
-			rec = new LinkedHashMap<String, Object>();
-		} else {
-			rec = new HashMap<String, Object>();
-		}
-		for (int i = 0; i < cd.data.length; i++) {
-			String fieldName = theFieldNames[i];
-			Object data = cd.data[i];
-			rec.put(fieldName, data);
-		}
-		return rec;
-	}
-
-	protected String stringArrayToCommaDelimitedList(String[] strs) {
-		StringBuilder sb = new StringBuilder();
-		for (String a : strs) {
-			if (sb.length() > 0) {
-				sb.append(",");
-			}
-			sb.append(a);
-		}
-		return sb.toString();
-	}
-
-	protected String clobToString(Clob cl) {
-		StringBuilder sb = new StringBuilder();
-		try {
-			Reader in = cl.getCharacterStream();
-			char[] cbuf = new char[1024];
-			int numGot = -1;
-			while ((numGot = in.read(cbuf)) != -1) {
-				sb.append(String.valueOf(cbuf, 0, numGot));
-			}
-		} catch (Exception e) {
-			Assert.fail(e.toString());
-		}
-		return sb.toString();
-	}
-	
-	public static Context getContext(final Map<String, String> entityAttrs) {
-		VariableResolverImpl resolver = new VariableResolverImpl();
+public class AbstractDIHCacheTestCase {
+  protected static final Date Feb21_2011 = new Date(1298268000000l);
+  protected final String[] fieldTypes = { "INTEGER", "BIGDECIMAL", "STRING", "STRING",   "FLOAT",   "DATE",   "CLOB" };
+  protected final String[] fieldNames = { "a_id",    "PI",         "letter", "examples", "a_float", "a_date", "DESCRIPTION" };
+  protected List<ControlData> data = new ArrayList<ControlData>();
+  protected Clob APPLE = null;
+
+  @Before
+  public void setup() {
+    try {
+      APPLE = new SerialClob(new String("Apples grow on trees and they are good to eat.").toCharArray());
+    } catch (SQLException sqe) {
+      Assert.fail("Could not Set up Test");
+    }
+
+    // The first row needs to have all non-null fields,
+    // otherwise we would have to always send the fieldTypes & fieldNames as CacheProperties when building.
+    data = new ArrayList<ControlData>();
+    data.add(new ControlData(new Object[] { new Integer(1), new BigDecimal(Math.PI), "A", "Apple", new Float(1.11), Feb21_2011, APPLE }));
+    data.add(new ControlData(new Object[] { new Integer(2), new BigDecimal(Math.PI), "B", "Ball", new Float(2.22), Feb21_2011, null }));
+    data.add(new ControlData(new Object[] { new Integer(4), new BigDecimal(Math.PI), "D", "Dog", new Float(4.44), Feb21_2011, null }));
+    data.add(new ControlData(new Object[] { new Integer(3), new BigDecimal(Math.PI), "C", "Cookie", new Float(3.33), Feb21_2011, null }));
+    data.add(new ControlData(new Object[] { new Integer(4), new BigDecimal(Math.PI), "D", "Daisy", new Float(4.44), Feb21_2011, null }));
+    data.add(new ControlData(new Object[] { new Integer(4), new BigDecimal(Math.PI), "D", "Drawing", new Float(4.44), Feb21_2011, null }));
+    data.add(new ControlData(new Object[] { new Integer(5), new BigDecimal(Math.PI), "E",
+        Arrays.asList("Eggplant", "Ear", "Elephant", "Engine"), new Float(5.55), Feb21_2011, null }));
+  }
+
+  @After
+  public void teardown() {
+    APPLE = null;
+    data = null;
+  }
+
+  //A limitation of this test class is that the primary key needs to be the first one in the list.
+  //DIHCaches, however, can handle any field being the primary key.
+  class ControlData implements Comparable<ControlData>, Iterable<Object> {
+    Object[] data;
+
+    ControlData(Object[] data) {
+      this.data = data;
+    }
+
+    @SuppressWarnings("unchecked")
+    public int compareTo(ControlData cd) {
+      Comparable c1 = (Comparable) data[0];
+      Comparable c2 = (Comparable) cd.data[0];
+      return c1.compareTo(c2);
+    }
+
+    public Iterator<Object> iterator() {
+      return Arrays.asList(data).iterator();
+    }
+  }
+
+  protected void loadData(DIHCache cache, List<ControlData> theData, String[] theFieldNames, boolean keepOrdered) {
+    for (ControlData cd : theData) {
+      cache.add(controlDataToMap(cd, theFieldNames, keepOrdered));
+    }
+  }
+
+  protected List<ControlData> extractDataInKeyOrder(DIHCache cache, String[] theFieldNames) {
+    List<Object[]> data = new ArrayList<Object[]>();
+    Iterator<Map<String, Object>> cacheIter = cache.iterator();
+    while (cacheIter.hasNext()) {
+      data.add(mapToObjectArray(cacheIter.next(), theFieldNames));
+    }
+    return listToControlData(data);
+  }
+
+  //This method assumes that the Primary Keys are integers and that the first id=1.
+  //It will look for id's sequentially until one is skipped, then will stop.
+  protected List<ControlData> extractDataByKeyLookup(DIHCache cache, String[] theFieldNames) {
+    int recId = 1;
+    List<Object[]> data = new ArrayList<Object[]>();
+    while (true) {
+      Iterator<Map<String, Object>> listORecs = cache.iterator(recId);
+      if (listORecs == null) {
+        break;
+      }
+
+      while(listORecs.hasNext()) {
+        data.add(mapToObjectArray(listORecs.next(), theFieldNames));
+      }
+      recId++;
+    }
+    return listToControlData(data);
+  }
+
+  protected List<ControlData> listToControlData(List<Object[]> data) {
+    List<ControlData> returnData = new ArrayList<ControlData>(data.size());
+    for (int i = 0; i < data.size(); i++) {
+      returnData.add(new ControlData(data.get(i)));
+    }
+    return returnData;
+  }
+
+  protected Object[] mapToObjectArray(Map<String, Object> rec, String[] theFieldNames) {
+    Object[] oos = new Object[theFieldNames.length];
+    for (int i = 0; i < theFieldNames.length; i++) {
+      oos[i] = rec.get(theFieldNames[i]);
+    }
+    return oos;
+  }
+
+  protected void compareData(List<ControlData> theControl, List<ControlData> test) {
+    // The test data should come back primarily in Key order and secondarily in insertion order.
+    List<ControlData> control = new ArrayList<ControlData>(theControl);
+    Collections.sort(control);
+
+    StringBuilder errors = new StringBuilder();
+    if (test.size() != control.size()) {
+      errors.append("-Returned data has " + test.size() + " records.  expected: " + control.size() + "\n");
+    }
+    for (int i = 0; i < control.size() && i < test.size(); i++) {
+      Object[] controlRec = control.get(i).data;
+      Object[] testRec = test.get(i).data;
+      if (testRec.length != controlRec.length) {
+        errors.append("-Record indexAt=" + i + " has " + testRec.length + " data elements.  extpected: " + controlRec.length + "\n");
+      }
+      for (int j = 0; j < controlRec.length && j < testRec.length; j++) {
+        Object controlObj = controlRec[j];
+        Object testObj = testRec[j];
+        if (controlObj == null && testObj != null) {
+          errors.append("-Record indexAt=" + i + ", Data Element indexAt=" + j + " is not NULL as expected.\n");
+        } else if (controlObj != null && testObj == null) {
+          errors.append("-Record indexAt=" + i + ", Data Element indexAt=" + j + " is NULL.  Expected: " + controlObj + " (class="
+              + controlObj.getClass().getName() + ")\n");
+        } else if (controlObj != null && testObj != null && controlObj instanceof Clob) {
+          String controlString = clobToString((Clob) controlObj);
+          String testString = clobToString((Clob) testObj);
+          if (!controlString.equals(testString)) {
+            errors.append("-Record indexAt=" + i + ", Data Element indexAt=" + j + " has: " + testString + " (class=Clob) ... Expected: " + controlString
+                + " (class=Clob)\n");
+          }
+        } else if (controlObj != null && !controlObj.equals(testObj)) {
+          errors.append("-Record indexAt=" + i + ", Data Element indexAt=" + j + " has: " + testObj + " (class=" + testObj.getClass().getName()
+              + ") ... Expected: " + controlObj + " (class=" + controlObj.getClass().getName() + ")\n");
+        }
+      }
+    }
+    if (errors.length() > 0) {
+      Assert.fail(errors.toString());
+    }
+  }
+
+  protected Map<String, Object> controlDataToMap(ControlData cd, String[] theFieldNames, boolean keepOrdered) {
+    Map<String, Object> rec = null;
+    if (keepOrdered) {
+      rec = new LinkedHashMap<String, Object>();
+    } else {
+      rec = new HashMap<String, Object>();
+    }
+    for (int i = 0; i < cd.data.length; i++) {
+      String fieldName = theFieldNames[i];
+      Object data = cd.data[i];
+      rec.put(fieldName, data);
+    }
+    return rec;
+  }
+
+  protected String stringArrayToCommaDelimitedList(String[] strs) {
+    StringBuilder sb = new StringBuilder();
+    for (String a : strs) {
+      if (sb.length() > 0) {
+        sb.append(",");
+      }
+      sb.append(a);
+    }
+    return sb.toString();
+  }
+
+  protected String clobToString(Clob cl) {
+    StringBuilder sb = new StringBuilder();
+    try {
+      Reader in = cl.getCharacterStream();
+      char[] cbuf = new char[1024];
+      int numGot = -1;
+      while ((numGot = in.read(cbuf)) != -1) {
+        sb.append(String.valueOf(cbuf, 0, numGot));
+      }
+    } catch (Exception e) {
+      Assert.fail(e.toString());
+    }
+    return sb.toString();
+  }
+
+  public static Context getContext(final Map<String, String> entityAttrs) {
+    VariableResolverImpl resolver = new VariableResolverImpl();
     final Context delegate = new ContextImpl(null, resolver, null, null, new HashMap<String, Object>(), null, null);
     return new TestContext(entityAttrs, delegate, null, true);
   }
-	
+
 }

Modified: lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java (original)
+++ lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java Mon Sep 17 16:01:56 2012
@@ -30,56 +30,56 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class TestSortedMapBackedCache extends AbstractDIHCacheTestCase {
-	
-	public static Logger log = LoggerFactory.getLogger(TestSortedMapBackedCache.class);
-	
-	@Test
-	public void testCacheWithKeyLookup() {
-		DIHCache cache = null;
-		try {
-			cache = new SortedMapBackedCache();
-			cache.open(getContext(new HashMap<String,String>()));
-			loadData(cache, data, fieldNames, true);
-			List<ControlData> testData = extractDataByKeyLookup(cache, fieldNames);
-			compareData(data, testData);
-		} catch (Exception e) {
-			log.warn("Exception thrown: " + e.toString());
-			Assert.fail();
-		} finally {
-			try {
-				cache.destroy();
-			} catch (Exception ex) {
-			}
-		}
-	}
-
-	@Test
-	public void testCacheWithOrderedLookup() {
-		DIHCache cache = null;
-		try {
-			cache = new SortedMapBackedCache();
-			cache.open(getContext(new HashMap<String,String>()));
-			loadData(cache, data, fieldNames, true);
-			List<ControlData> testData = extractDataInKeyOrder(cache, fieldNames);
-			compareData(data, testData);
-		} catch (Exception e) {
-			log.warn("Exception thrown: " + e.toString());
-			Assert.fail();
-		} finally {
-			try {
-				cache.destroy();
-			} catch (Exception ex) {
-			}
-		}
-	}
-	
-	@Test
-	public void testNullKeys() throws Exception {
-	  //A null key should just be ignored, but not throw an exception
-	  DIHCache cache = null;
-	  try {
-	    cache = new SortedMapBackedCache();
-	    Map<String, String> cacheProps = new HashMap<String, String>();
+  
+  public static Logger log = LoggerFactory.getLogger(TestSortedMapBackedCache.class);
+  
+  @Test
+  public void testCacheWithKeyLookup() {
+    DIHCache cache = null;
+    try {
+      cache = new SortedMapBackedCache();
+      cache.open(getContext(new HashMap<String,String>()));
+      loadData(cache, data, fieldNames, true);
+      List<ControlData> testData = extractDataByKeyLookup(cache, fieldNames);
+      compareData(data, testData);
+    } catch (Exception e) {
+      log.warn("Exception thrown: " + e.toString());
+      Assert.fail();
+    } finally {
+      try {
+        cache.destroy();
+      } catch (Exception ex) {
+      }
+    }
+  }
+
+  @Test
+  public void testCacheWithOrderedLookup() {
+    DIHCache cache = null;
+    try {
+      cache = new SortedMapBackedCache();
+      cache.open(getContext(new HashMap<String,String>()));
+      loadData(cache, data, fieldNames, true);
+      List<ControlData> testData = extractDataInKeyOrder(cache, fieldNames);
+      compareData(data, testData);
+    } catch (Exception e) {
+      log.warn("Exception thrown: " + e.toString());
+      Assert.fail();
+    } finally {
+      try {
+        cache.destroy();
+      } catch (Exception ex) {
+      }
+    }
+  }
+  
+  @Test
+  public void testNullKeys() throws Exception {
+    //A null key should just be ignored, but not throw an exception
+    DIHCache cache = null;
+    try {
+      cache = new SortedMapBackedCache();
+      Map<String, String> cacheProps = new HashMap<String, String>();
       cacheProps.put(DIHCacheSupport.CACHE_PRIMARY_KEY, "a_id");
       cache.open(getContext(cacheProps));
       
@@ -94,98 +94,98 @@ public class TestSortedMapBackedCache ex
       }
       Assert.assertNull(cache.iterator(null));
       cache.delete(null);      
-	  } catch (Exception e) {
-	    throw e;
+    } catch (Exception e) {
+      throw e;
     } finally {
       try {
         cache.destroy();
       } catch (Exception ex) {
       }
-    }	  
-	}
+    }    
+  }
 
-	@Test
-	public void testCacheReopensWithUpdate() {
-		DIHCache cache = null;
-		try {			
-			Map<String, String> cacheProps = new HashMap<String, String>();
-			cacheProps.put(DIHCacheSupport.CACHE_PRIMARY_KEY, "a_id");
-			
-			cache = new SortedMapBackedCache();
-			cache.open(getContext(cacheProps));
-			// We can let the data hit the cache with the fields out of order because
-			// we've identified the pk up-front.
-			loadData(cache, data, fieldNames, false);
-
-			// Close the cache.
-			cache.close();
-
-			List<ControlData> newControlData = new ArrayList<ControlData>();
-			Object[] newIdEqualsThree = null;
-			int j = 0;
-			for (int i = 0; i < data.size(); i++) {
-				// We'll be deleting a_id=1 so remove it from the control data.
-				if (data.get(i).data[0].equals(new Integer(1))) {
-					continue;
-				}
-
-				// We'll be changing "Cookie" to "Carrot" in a_id=3 so change it in the control data.
-				if (data.get(i).data[0].equals(new Integer(3))) {
-					newIdEqualsThree = new Object[data.get(i).data.length];
-					System.arraycopy(data.get(i).data, 0, newIdEqualsThree, 0, newIdEqualsThree.length);
-					newIdEqualsThree[3] = "Carrot";
-					newControlData.add(new ControlData(newIdEqualsThree));
-				}
-				// Everything else can just be copied over.
-				else {
-					newControlData.add(data.get(i));
-				}
-
-				j++;
-			}
-
-			// These new rows of data will get added to the cache, so add them to the control data too.
-			Object[] newDataRow1 = new Object[] { new Integer(99), new BigDecimal(Math.PI), "Z", "Zebra", new Float(99.99), Feb21_2011, null };
-			Object[] newDataRow2 = new Object[] { new Integer(2), new BigDecimal(Math.PI), "B", "Ballerina", new Float(2.22), Feb21_2011, null };
-
-			newControlData.add(new ControlData(newDataRow1));
-			newControlData.add(new ControlData(newDataRow2));
-
-			// Re-open the cache
-			cache.open(getContext(new HashMap<String,String>()));
-
-			// Delete a_id=1 from the cache.
-			cache.delete(new Integer(1));
-
-			// Because the cache allows duplicates, the only way to update is to
-			// delete first then add.
-			cache.delete(new Integer(3));
-			cache.add(controlDataToMap(new ControlData(newIdEqualsThree), fieldNames, false));
-
-			// Add this row with a new Primary key.
-			cache.add(controlDataToMap(new ControlData(newDataRow1), fieldNames, false));
-
-			// Add this row, creating two records in the cache with a_id=2.
-			cache.add(controlDataToMap(new ControlData(newDataRow2), fieldNames, false));
-
-			// Read the cache back and compare to the newControlData
-			List<ControlData> testData = extractDataInKeyOrder(cache, fieldNames);
-			compareData(newControlData, testData);
-
-			// Now try reading the cache read-only.
-			cache.close();
-			cache.open(getContext(new HashMap<String,String>()));
-			testData = extractDataInKeyOrder(cache, fieldNames);
-			compareData(newControlData, testData);
-
-		} catch (Exception e) {
-			log.warn("Exception thrown: " + e.toString());
-			Assert.fail();
-		} finally {
-			try {
-				cache.destroy();
-			} catch (Exception ex) {
-			}
-		}
-	}
+  @Test
+  public void testCacheReopensWithUpdate() {
+    DIHCache cache = null;
+    try {      
+      Map<String, String> cacheProps = new HashMap<String, String>();
+      cacheProps.put(DIHCacheSupport.CACHE_PRIMARY_KEY, "a_id");
+      
+      cache = new SortedMapBackedCache();
+      cache.open(getContext(cacheProps));
+      // We can let the data hit the cache with the fields out of order because
+      // we've identified the pk up-front.
+      loadData(cache, data, fieldNames, false);
+
+      // Close the cache.
+      cache.close();
+
+      List<ControlData> newControlData = new ArrayList<ControlData>();
+      Object[] newIdEqualsThree = null;
+      int j = 0;
+      for (int i = 0; i < data.size(); i++) {
+        // We'll be deleting a_id=1 so remove it from the control data.
+        if (data.get(i).data[0].equals(new Integer(1))) {
+          continue;
+        }
+
+        // We'll be changing "Cookie" to "Carrot" in a_id=3 so change it in the control data.
+        if (data.get(i).data[0].equals(new Integer(3))) {
+          newIdEqualsThree = new Object[data.get(i).data.length];
+          System.arraycopy(data.get(i).data, 0, newIdEqualsThree, 0, newIdEqualsThree.length);
+          newIdEqualsThree[3] = "Carrot";
+          newControlData.add(new ControlData(newIdEqualsThree));
+        }
+        // Everything else can just be copied over.
+        else {
+          newControlData.add(data.get(i));
+        }
+
+        j++;
+      }
+
+      // These new rows of data will get added to the cache, so add them to the control data too.
+      Object[] newDataRow1 = new Object[] { new Integer(99), new BigDecimal(Math.PI), "Z", "Zebra", new Float(99.99), Feb21_2011, null };
+      Object[] newDataRow2 = new Object[] { new Integer(2), new BigDecimal(Math.PI), "B", "Ballerina", new Float(2.22), Feb21_2011, null };
+
+      newControlData.add(new ControlData(newDataRow1));
+      newControlData.add(new ControlData(newDataRow2));
+
+      // Re-open the cache
+      cache.open(getContext(new HashMap<String,String>()));
+
+      // Delete a_id=1 from the cache.
+      cache.delete(new Integer(1));
+
+      // Because the cache allows duplicates, the only way to update is to
+      // delete first then add.
+      cache.delete(new Integer(3));
+      cache.add(controlDataToMap(new ControlData(newIdEqualsThree), fieldNames, false));
+
+      // Add this row with a new Primary key.
+      cache.add(controlDataToMap(new ControlData(newDataRow1), fieldNames, false));
+
+      // Add this row, creating two records in the cache with a_id=2.
+      cache.add(controlDataToMap(new ControlData(newDataRow2), fieldNames, false));
+
+      // Read the cache back and compare to the newControlData
+      List<ControlData> testData = extractDataInKeyOrder(cache, fieldNames);
+      compareData(newControlData, testData);
+
+      // Now try reading the cache read-only.
+      cache.close();
+      cache.open(getContext(new HashMap<String,String>()));
+      testData = extractDataInKeyOrder(cache, fieldNames);
+      compareData(newControlData, testData);
+
+    } catch (Exception e) {
+      log.warn("Exception thrown: " + e.toString());
+      Assert.fail();
+    } finally {
+      try {
+        cache.destroy();
+      } catch (Exception ex) {
+      }
+    }
+  }
 }

Modified: lucene/dev/trunk/solr/contrib/uima/src/test/org/apache/solr/uima/ts/EntityAnnotation_Type.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/uima/src/test/org/apache/solr/uima/ts/EntityAnnotation_Type.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/uima/src/test/org/apache/solr/uima/ts/EntityAnnotation_Type.java (original)
+++ lucene/dev/trunk/solr/contrib/uima/src/test/org/apache/solr/uima/ts/EntityAnnotation_Type.java Mon Sep 17 16:01:56 2012
@@ -1,4 +1,3 @@
-
 /* First created by JCasGen Sat May 07 22:33:38 JST 2011 */
 package org.apache.solr.uima.ts;
 
@@ -23,17 +22,17 @@ public class EntityAnnotation_Type exten
   private final FSGenerator fsGenerator = 
     new FSGenerator() {
       public FeatureStructure createFS(int addr, CASImpl cas) {
-  			 if (EntityAnnotation_Type.this.useExistingInstance) {
-  			   // Return eq fs instance if already created
-  		     FeatureStructure fs = EntityAnnotation_Type.this.jcas.getJfsFromCaddr(addr);
-  		     if (null == fs) {
-  		       fs = new EntityAnnotation(addr, EntityAnnotation_Type.this);
-  			   EntityAnnotation_Type.this.jcas.putJfsFromCaddr(addr, fs);
-  			   return fs;
-  		     }
-  		     return fs;
+         if (EntityAnnotation_Type.this.useExistingInstance) {
+           // Return eq fs instance if already created
+           FeatureStructure fs = EntityAnnotation_Type.this.jcas.getJfsFromCaddr(addr);
+           if (null == fs) {
+             fs = new EntityAnnotation(addr, EntityAnnotation_Type.this);
+             EntityAnnotation_Type.this.jcas.putJfsFromCaddr(addr, fs);
+             return fs;
+           }
+           return fs;
         } else return new EntityAnnotation(addr, EntityAnnotation_Type.this);
-  	  }
+      }
     };
   /** @generated */
   public final static int typeIndexID = EntityAnnotation.typeIndexID;
@@ -80,7 +79,7 @@ public class EntityAnnotation_Type exten
 
 
   /** initialize variables to correspond with Cas Type and Features
-	* @generated */
+  * @generated */
   public EntityAnnotation_Type(JCas jcas, Type casType) {
     super(jcas, casType);
     casImpl.getFSClassRegistry().addGeneratorForType((TypeImpl)this.casType, getFSGenerator());
@@ -98,4 +97,4 @@ public class EntityAnnotation_Type exten
 
 
 
-    
\ No newline at end of file
+    

Modified: lucene/dev/trunk/solr/contrib/uima/src/test/org/apache/solr/uima/ts/SentimentAnnotation_Type.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/uima/src/test/org/apache/solr/uima/ts/SentimentAnnotation_Type.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/uima/src/test/org/apache/solr/uima/ts/SentimentAnnotation_Type.java (original)
+++ lucene/dev/trunk/solr/contrib/uima/src/test/org/apache/solr/uima/ts/SentimentAnnotation_Type.java Mon Sep 17 16:01:56 2012
@@ -1,4 +1,3 @@
-
 /* First created by JCasGen Fri Mar 04 13:08:40 CET 2011 */
 package org.apache.solr.uima.ts;
 
@@ -23,17 +22,17 @@ public class SentimentAnnotation_Type ex
   private final FSGenerator fsGenerator = 
     new FSGenerator() {
       public FeatureStructure createFS(int addr, CASImpl cas) {
-  			 if (SentimentAnnotation_Type.this.useExistingInstance) {
-  			   // Return eq fs instance if already created
-  		     FeatureStructure fs = SentimentAnnotation_Type.this.jcas.getJfsFromCaddr(addr);
-  		     if (null == fs) {
-  		       fs = new SentimentAnnotation(addr, SentimentAnnotation_Type.this);
-  			   SentimentAnnotation_Type.this.jcas.putJfsFromCaddr(addr, fs);
-  			   return fs;
-  		     }
-  		     return fs;
+        if (SentimentAnnotation_Type.this.useExistingInstance) {
+          // Return eq fs instance if already created
+          FeatureStructure fs = SentimentAnnotation_Type.this.jcas.getJfsFromCaddr(addr);
+          if (null == fs) {
+            fs = new SentimentAnnotation(addr, SentimentAnnotation_Type.this);
+            SentimentAnnotation_Type.this.jcas.putJfsFromCaddr(addr, fs);
+            return fs;
+          }
+          return fs;
         } else return new SentimentAnnotation(addr, SentimentAnnotation_Type.this);
-  	  }
+      }
     };
   /** @generated */
   public final static int typeIndexID = SentimentAnnotation.typeIndexID;
@@ -62,7 +61,7 @@ public class SentimentAnnotation_Type ex
 
 
   /** initialize variables to correspond with Cas Type and Features
-	* @generated */
+  * @generated */
   public SentimentAnnotation_Type(JCas jcas, Type casType) {
     super(jcas, casType);
     casImpl.getFSClassRegistry().addGeneratorForType((TypeImpl)this.casType, getFSGenerator());
@@ -76,4 +75,4 @@ public class SentimentAnnotation_Type ex
 
 
 
-    
\ No newline at end of file
+    

Modified: lucene/dev/trunk/solr/contrib/velocity/src/java/org/apache/solr/response/PageTool.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/velocity/src/java/org/apache/solr/response/PageTool.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/velocity/src/java/org/apache/solr/response/PageTool.java (original)
+++ lucene/dev/trunk/solr/contrib/velocity/src/java/org/apache/solr/response/PageTool.java Mon Sep 17 16:01:56 2012
@@ -58,7 +58,7 @@ public class PageTool {
         results_found = doc_list.getNumFound();
         start = doc_list.getStart();
       } else {
-	  throw new SolrException(SolrException.ErrorCode.UNKNOWN, "Unknown response type "+docs+". Expected one of DocSlice, ResultContext or SolrDocumentList");
+        throw new SolrException(SolrException.ErrorCode.UNKNOWN, "Unknown response type "+docs+". Expected one of DocSlice, ResultContext or SolrDocumentList");
       }
     }
 

Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/cloud/CloudDescriptor.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/cloud/CloudDescriptor.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/cloud/CloudDescriptor.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/cloud/CloudDescriptor.java Mon Sep 17 16:01:56 2012
@@ -54,11 +54,11 @@ public class CloudDescriptor {
   }
 
   public String getRoles(){
-	  return roles;
+    return roles;
   }
   
   public void setRoles(String roles){
-	  this.roles = roles;
+    this.roles = roles;
   }
   
   /** Optional parameters that can change how a core is created. */

Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/core/Config.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/core/Config.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/core/Config.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/core/Config.java Mon Sep 17 16:01:56 2012
@@ -141,8 +141,8 @@ public class Config {
       SolrException.log(log, "Exception during parsing file: " + name, e);
       throw e;
     } catch( SolrException e ){
-    	SolrException.log(log,"Error in "+name,e);
-    	throw e;
+      SolrException.log(log,"Error in "+name,e);
+      throw e;
     }
   }
   

Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/core/CoreContainer.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/core/CoreContainer.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/core/CoreContainer.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/core/CoreContainer.java Mon Sep 17 16:01:56 2012
@@ -529,7 +529,7 @@ public class CoreContainer 
           }
           opt = DOMUtil.getAttr(node, CORE_ROLES, null);
           if(opt != null){
-        	  p.getCloudDescriptor().setRoles(opt);
+            p.getCloudDescriptor().setRoles(opt);
           }
         }
         opt = DOMUtil.getAttr(node, CORE_PROPERTIES, null);

Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/handler/component/ResponseBuilder.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/handler/component/ResponseBuilder.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/handler/component/ResponseBuilder.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/handler/component/ResponseBuilder.java Mon Sep 17 16:01:56 2012
@@ -409,11 +409,11 @@ public class ResponseBuilder
 
   public ScoreDoc getScoreDoc()
   {
-	  return scoreDoc;
+    return scoreDoc;
   }
   
   public void setScoreDoc(ScoreDoc scoreDoc)
   {
-	  this.scoreDoc = scoreDoc;
+    this.scoreDoc = scoreDoc;
   }
 }

Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/handler/component/SpellCheckComponent.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/handler/component/SpellCheckComponent.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/handler/component/SpellCheckComponent.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/handler/component/SpellCheckComponent.java Mon Sep 17 16:01:56 2012
@@ -191,41 +191,41 @@ public class SpellCheckComponent extends
   }
   
   @SuppressWarnings("unchecked")
-	protected void addCollationsToResponse(SolrParams params, SpellingResult spellingResult, ResponseBuilder rb, String q,
-	    NamedList response, boolean suggestionsMayOverlap) {
-		int maxCollations = params.getInt(SPELLCHECK_MAX_COLLATIONS, 1);
-		int maxCollationTries = params.getInt(SPELLCHECK_MAX_COLLATION_TRIES, 0);
-		int maxCollationEvaluations = params.getInt(SPELLCHECK_MAX_COLLATION_EVALUATIONS, 10000);
-		boolean collationExtendedResults = params.getBool(SPELLCHECK_COLLATE_EXTENDED_RESULTS, false);
-		boolean shard = params.getBool(ShardParams.IS_SHARD, false);
+  protected void addCollationsToResponse(SolrParams params, SpellingResult spellingResult, ResponseBuilder rb, String q,
+      NamedList response, boolean suggestionsMayOverlap) {
+    int maxCollations = params.getInt(SPELLCHECK_MAX_COLLATIONS, 1);
+    int maxCollationTries = params.getInt(SPELLCHECK_MAX_COLLATION_TRIES, 0);
+    int maxCollationEvaluations = params.getInt(SPELLCHECK_MAX_COLLATION_EVALUATIONS, 10000);
+    boolean collationExtendedResults = params.getBool(SPELLCHECK_COLLATE_EXTENDED_RESULTS, false);
+    boolean shard = params.getBool(ShardParams.IS_SHARD, false);
 
-		SpellCheckCollator collator = new SpellCheckCollator();
-		List<SpellCheckCollation> collations = collator.collate(spellingResult, q, rb, maxCollations, maxCollationTries, maxCollationEvaluations, suggestionsMayOverlap);
+    SpellCheckCollator collator = new SpellCheckCollator();
+    List<SpellCheckCollation> collations = collator.collate(spellingResult, q, rb, maxCollations, maxCollationTries, maxCollationEvaluations, suggestionsMayOverlap);
     //by sorting here we guarantee a non-distributed request returns all 
-		//results in the same order as a distributed request would, 
-		//even in cases when the internal rank is the same.
-		Collections.sort(collations);
-		
-		for (SpellCheckCollation collation : collations) {
-			if (collationExtendedResults) {
-				NamedList extendedResult = new NamedList();
-				extendedResult.add("collationQuery", collation.getCollationQuery());
-				extendedResult.add("hits", collation.getHits());
-				extendedResult.add("misspellingsAndCorrections", collation.getMisspellingsAndCorrections());
-				if(maxCollationTries>0 && shard)
-				{
-					extendedResult.add("collationInternalRank", collation.getInternalRank());
-				}
-				response.add("collation", extendedResult);
-			} else {
-				response.add("collation", collation.getCollationQuery());
-				if(maxCollationTries>0 && shard)
-				{
-					response.add("collationInternalRank", collation.getInternalRank());
-				}
-			}
-		}
-	}
+    //results in the same order as a distributed request would,
+    //even in cases when the internal rank is the same.
+    Collections.sort(collations);
+
+    for (SpellCheckCollation collation : collations) {
+      if (collationExtendedResults) {
+        NamedList extendedResult = new NamedList();
+        extendedResult.add("collationQuery", collation.getCollationQuery());
+        extendedResult.add("hits", collation.getHits());
+        extendedResult.add("misspellingsAndCorrections", collation.getMisspellingsAndCorrections());
+        if(maxCollationTries>0 && shard)
+        {
+          extendedResult.add("collationInternalRank", collation.getInternalRank());
+        }
+        response.add("collation", extendedResult);
+      } else {
+        response.add("collation", collation.getCollationQuery());
+        if(maxCollationTries>0 && shard)
+        {
+          response.add("collationInternalRank", collation.getInternalRank());
+        }
+      }
+    }
+  }
 
   /**
    * For every param that is of the form "spellcheck.[dictionary name].XXXX=YYYY, add
@@ -297,8 +297,8 @@ public class SpellCheckComponent extends
           NamedList nl = (NamedList) srsp.getSolrResponse().getResponse().get("spellcheck");
           LOG.info(srsp.getShard() + " " + nl);
           if (nl != null) {
-          	mergeData.totalNumberShardResponses++;
-          	collectShardSuggestions(nl, mergeData);          
+            mergeData.totalNumberShardResponses++;
+            collectShardSuggestions(nl, mergeData);
             collectShardCollations(mergeData, nl, maxCollationTries);
           }
         }
@@ -317,22 +317,22 @@ public class SpellCheckComponent extends
       SpellCheckCollation[] sortedCollations = mergeData.collations.values()
           .toArray(new SpellCheckCollation[mergeData.collations.size()]);
       Arrays.sort(sortedCollations);
-			int i = 0;
-			while (i < maxCollations && i < sortedCollations.length) {
-				SpellCheckCollation collation = sortedCollations[i];
-				i++;
-				if (collationExtendedResults) {
-					NamedList extendedResult = new NamedList();
-					extendedResult.add("collationQuery", collation.getCollationQuery());
-					extendedResult.add("hits", collation.getHits());
-					extendedResult.add("misspellingsAndCorrections", collation
-							.getMisspellingsAndCorrections());
-					suggestions.add("collation", extendedResult);
-				} else {
-					suggestions.add("collation", collation.getCollationQuery());
-				}
-			}
-		}
+      int i = 0;
+      while (i < maxCollations && i < sortedCollations.length) {
+        SpellCheckCollation collation = sortedCollations[i];
+        i++;
+        if (collationExtendedResults) {
+          NamedList extendedResult = new NamedList();
+          extendedResult.add("collationQuery", collation.getCollationQuery());
+          extendedResult.add("hits", collation.getHits());
+          extendedResult.add("misspellingsAndCorrections", collation
+              .getMisspellingsAndCorrections());
+          suggestions.add("collation", extendedResult);
+        } else {
+          suggestions.add("collation", collation.getCollationQuery());
+        }
+      }
+    }
     
     response.add("suggestions", suggestions);
     rb.rsp.add("spellcheck", response);

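The retabbed addCollationsToResponse() above also documents the knobs it reads. A minimal client-side sketch of setting them (param spellings assumed from the SPELLCHECK_* constant names referenced above; the query text is invented):

    import org.apache.solr.common.params.ModifiableSolrParams;

    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set("q", "grene aple");
    params.set("spellcheck", true);
    params.set("spellcheck.collate", true);
    params.set("spellcheck.maxCollations", 3);              // SPELLCHECK_MAX_COLLATIONS
    params.set("spellcheck.maxCollationTries", 10);         // SPELLCHECK_MAX_COLLATION_TRIES
    params.set("spellcheck.maxCollationEvaluations", 1000); // SPELLCHECK_MAX_COLLATION_EVALUATIONS
    params.set("spellcheck.collateExtendedResults", true);  // adds hits + misspellingsAndCorrections

With collateExtendedResults on, each collation comes back as the NamedList built in the loop above; with it off, only the collation query string is returned.
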
Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/handler/loader/CSVLoaderBase.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/handler/loader/CSVLoaderBase.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/handler/loader/CSVLoaderBase.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/handler/loader/CSVLoaderBase.java Mon Sep 17 16:01:56 2012
@@ -388,12 +388,12 @@ abstract class CSVLoaderBase extends Con
 
     // add any literals
     for (SchemaField sf : literals.keySet()) {
-    	String fn = sf.getName();
-    	String val = literals.get(sf);
-    	doc.addField(fn, val);
+      String fn = sf.getName();
+      String val = literals.get(sf);
+      doc.addField(fn, val);
     }
    
     template.solrDoc = doc;
     processor.processAdd(template);
   }
-}
\ No newline at end of file
+}

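The literals loop above is what turns literal.<field> request params into constant fields on every CSV row. A hedged sketch of the effect (handler path and values invented for illustration):

    // Request: /update/csv?literal.feed=products2012&stream.file=books.csv
    // For each parsed row, the loop above effectively does:
    doc.addField("feed", "products2012");   // same literal value on every document
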
Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/response/PHPSerializedResponseWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/response/PHPSerializedResponseWriter.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/response/PHPSerializedResponseWriter.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/response/PHPSerializedResponseWriter.java Mon Sep 17 16:01:56 2012
@@ -173,10 +173,10 @@ class PHPSerializedWriter extends JSONWr
   
   @Override
   public void writeMapOpener(int size) throws IOException, IllegalArgumentException {
-  	// negative size value indicates that something has gone wrong
-  	if (size < 0) {
-  		throw new IllegalArgumentException("Map size must not be negative");
-  	}
+    // negative size value indicates that something has gone wrong
+    if (size < 0) {
+      throw new IllegalArgumentException("Map size must not be negative");
+    }
     writer.write("a:"+size+":{");
   }
   
@@ -192,10 +192,10 @@ class PHPSerializedWriter extends JSONWr
 
   @Override
   public void writeArrayOpener(int size) throws IOException, IllegalArgumentException {
-  	// negative size value indicates that something has gone wrong
-  	if (size < 0) {
-  		throw new IllegalArgumentException("Array size must not be negative");
-  	}
+    // negative size value indicates that something has gone wrong
+    if (size < 0) {
+      throw new IllegalArgumentException("Array size must not be negative");
+    }
     writer.write("a:"+size+":{");
   }
 

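Both openers above emit PHP serialize() syntax, where "a:<count>:{" starts an array of <count> entries; that is why a negative count is rejected before anything is written. For example, a two-entry map renders as:

    a:2:{s:4:"name";s:4:"solr";s:4:"hits";i:42;}
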
Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/response/RawResponseWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/response/RawResponseWriter.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/response/RawResponseWriter.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/response/RawResponseWriter.java Mon Sep 17 16:01:56 2012
@@ -96,7 +96,7 @@ public class RawResponseWriter implement
   }
 
 public void write(OutputStream out, SolrQueryRequest request,
-		SolrQueryResponse response) throws IOException {
+    SolrQueryResponse response) throws IOException {
     Object obj = response.getValues().get( CONTENT );
     if( obj != null && (obj instanceof ContentStream ) ) {
       // copy the contents to the writer...
@@ -110,8 +110,8 @@ public void write(OutputStream out, Solr
     }
     else {
       //getBaseWriter( request ).write( writer, request, response );
-    	throw new IOException("did not find a CONTENT object");
+      throw new IOException("did not find a CONTENT object");
     }
-	
-}
+
+  }
 }

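For context on the IOException branch above: this writer only streams whatever a handler has stored under the CONTENT key. A minimal sketch of the producing side (file name invented):

    import java.io.File;
    import org.apache.solr.common.util.ContentStreamBase;

    // In a request handler: hand the writer a stream to copy out verbatim.
    rsp.add(RawResponseWriter.CONTENT,
        new ContentStreamBase.FileStream(new File("schema.xml")));
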
Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/response/SolrQueryResponse.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/response/SolrQueryResponse.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/response/SolrQueryResponse.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/response/SolrQueryResponse.java Mon Sep 17 16:01:56 2012
@@ -194,8 +194,8 @@ public class SolrQueryResponse {
   /** Response header to be logged */ 
   public NamedList<Object> getResponseHeader() {
     @SuppressWarnings("unchecked")
-	  SimpleOrderedMap<Object> header = (SimpleOrderedMap<Object>) values.get("responseHeader");
-	  return header;
+    SimpleOrderedMap<Object> header = (SimpleOrderedMap<Object>) values.get("responseHeader");
+    return header;
   }
   
   /** Add a value to be logged.
@@ -204,7 +204,7 @@ public class SolrQueryResponse {
    * @param val value of the thing to log
    */
   public void addToLog(String name, Object val) {
-	  toLog.add(name, val);
+    toLog.add(name, val);
   }
   
   /** Get loggable items.
@@ -212,7 +212,7 @@ public class SolrQueryResponse {
    * @return things to log
    */
   public NamedList<Object> getToLog() {
-	  return toLog;
+    return toLog;
   }
   
   /**

Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/schema/SchemaField.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/schema/SchemaField.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/schema/SchemaField.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/schema/SchemaField.java Mon Sep 17 16:01:56 2012
@@ -185,7 +185,7 @@ public final class SchemaField extends F
 
     String defaultValue = null;
     if( props.containsKey( "default" ) ) {
-    	defaultValue = props.get( "default" );
+      defaultValue = props.get( "default" );
     }
     return new SchemaField(name, ft, calcProps(name, ft, props), defaultValue );
   }

Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/search/QParser.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/search/QParser.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/search/QParser.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/search/QParser.java Mon Sep 17 16:01:56 2012
@@ -224,18 +224,18 @@ public abstract class QParser {
     String pageScoreS = null;
     String pageDocS = null;
 
-	  pageScoreS = params.get(CommonParams.PAGESCORE);
-	  pageDocS = params.get(CommonParams.PAGEDOC);
-		  
-	  if (pageScoreS == null || pageDocS == null)
-		  return null;
-	  
-	  int pageDoc = pageDocS != null ? Integer.parseInt(pageDocS) : -1;
-	  float pageScore = pageScoreS != null ? new Float(pageScoreS) : -1;
-	  if(pageDoc != -1 && pageScore != -1){
+    pageScoreS = params.get(CommonParams.PAGESCORE);
+    pageDocS = params.get(CommonParams.PAGEDOC);
+
+    if (pageScoreS == null || pageDocS == null)
+      return null;
+
+    int pageDoc = pageDocS != null ? Integer.parseInt(pageDocS) : -1;
+    float pageScore = pageScoreS != null ? new Float(pageScoreS) : -1;
+    if(pageDoc != -1 && pageScore != -1){
       return new ScoreDoc(pageDoc, pageScore);
     }
-	  else {
+    else {
       return null;
     }
 

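The reindented block above is the read side of the deep-paging contract from Issue 1726: the client echoes back the last hit's internal docid and score, and getPaging() rebuilds a ScoreDoc from them. A sketch of the matching request (literal param spellings assumed from CommonParams.PAGEDOC/PAGESCORE; values invented):

    import org.apache.solr.common.params.ModifiableSolrParams;

    ModifiableSolrParams p = new ModifiableSolrParams();
    p.set("q", "*:*");
    p.set("pageDoc", 543);        // docid of the last hit on the previous page
    p.set("pageScore", "2.71");   // its score
    // -> new ScoreDoc(543, 2.71f); omit either param and null is returned,
    //    falling back to ordinary top-N collection.
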
Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java Mon Sep 17 16:01:56 2012
@@ -1369,7 +1369,7 @@ public class SolrIndexSearcher extends I
       TopDocsCollector topCollector;
       if (cmd.getSort() == null) {
         if(cmd.getScoreDoc() != null) {
-        	topCollector = TopScoreDocCollector.create(len, cmd.getScoreDoc(), true); //create the Collector with InOrderPagingCollector
+          topCollector = TopScoreDocCollector.create(len, cmd.getScoreDoc(), true); //create the Collector with InOrderPagingCollector
         } else {
           topCollector = TopScoreDocCollector.create(len, true);
         }
@@ -2029,11 +2029,11 @@ public class SolrIndexSearcher extends I
     
     public ScoreDoc getScoreDoc()
     {
-    	return scoreDoc;
+      return scoreDoc;
     }
     public void setScoreDoc(ScoreDoc scoreDoc)
     {
-    	this.scoreDoc = scoreDoc;
+      this.scoreDoc = scoreDoc;
     }
     //Issue 1726 end
 

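The collector change above feeds that ScoreDoc into TopScoreDocCollector as an "after" pointer. The same idea in plain Lucene, sketched with searchAfter (available since 3.5; searcher and query assumed in scope):

    import org.apache.lucene.search.ScoreDoc;
    import org.apache.lucene.search.TopDocs;

    TopDocs page1 = searcher.search(query, 10);                   // first page
    ScoreDoc last = page1.scoreDocs[page1.scoreDocs.length - 1];  // remember last hit
    TopDocs page2 = searcher.searchAfter(last, query, 10);        // resume after it
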
Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/spelling/AbstractLuceneSpellChecker.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/spelling/AbstractLuceneSpellChecker.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/spelling/AbstractLuceneSpellChecker.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/spelling/AbstractLuceneSpellChecker.java Mon Sep 17 16:01:56 2012
@@ -142,7 +142,7 @@ public abstract class AbstractLuceneSpel
   
   @Override
   public SpellingResult getSuggestions(SpellingOptions options) throws IOException {
-  	SpellingResult result = new SpellingResult(options.tokens);
+    SpellingResult result = new SpellingResult(options.tokens);
     IndexReader reader = determineReader(options.reader);
     Term term = field != null ? new Term(field, "") : null;
     float theAccuracy = (options.accuracy == Float.MIN_VALUE) ? spellChecker.getAccuracy() : options.accuracy;
@@ -187,13 +187,13 @@ public abstract class AbstractLuceneSpel
         int countLimit = Math.min(options.count, suggestions.length);
         if(countLimit>0)
         {
-	        for (int i = 0; i < countLimit; i++) {
-	          term = new Term(field, suggestions[i]);
-	          result.add(token, suggestions[i], reader.docFreq(term));
-	        }
+          for (int i = 0; i < countLimit; i++) {
+            term = new Term(field, suggestions[i]);
+            result.add(token, suggestions[i], reader.docFreq(term));
+          }
         } else {
-        	List<String> suggList = Collections.emptyList();
-        	result.add(token, suggList);
+          List<String> suggList = Collections.emptyList();
+          result.add(token, suggList);
         }
       } else {
         if (suggestions.length > 0) {
@@ -203,8 +203,8 @@ public abstract class AbstractLuceneSpel
           }
           result.add(token, suggList);
         } else {
-        	List<String> suggList = Collections.emptyList();
-        	result.add(token, suggList);
+          List<String> suggList = Collections.emptyList();
+          result.add(token, suggList);
         }
       }
     }

Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/spelling/ConjunctionSolrSpellChecker.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/spelling/ConjunctionSolrSpellChecker.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/spelling/ConjunctionSolrSpellChecker.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/spelling/ConjunctionSolrSpellChecker.java Mon Sep 17 16:01:56 2012
@@ -139,7 +139,7 @@ public class ConjunctionSolrSpellChecker
     Map<Token, Integer> combinedTokenFrequency = new HashMap<Token, Integer>();
     Map<Token, List<LinkedHashMap<String, Integer>>> allSuggestions = new LinkedHashMap<Token, List<LinkedHashMap<String, Integer>>>();
     for(SpellingResult result : results) {
-    	if(result.getTokenFrequency()!=null) {
+      if(result.getTokenFrequency()!=null) {
         combinedTokenFrequency.putAll(result.getTokenFrequency());
       }
       for(Map.Entry<Token, LinkedHashMap<String, Integer>> entry : result.getSuggestions().entrySet()) {

Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/spelling/DirectSolrSpellChecker.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/spelling/DirectSolrSpellChecker.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/spelling/DirectSolrSpellChecker.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/spelling/DirectSolrSpellChecker.java Mon Sep 17 16:01:56 2012
@@ -214,7 +214,7 @@ public class DirectSolrSpellChecker exte
         result.add(token, empty);
       } else {        
         for (SuggestWord suggestion : suggestions) {
-          result.add(token, suggestion.string, suggestion.freq);      	
+          result.add(token, suggestion.string, suggestion.freq);
         }
       }
     }

Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/update/UpdateLog.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/update/UpdateLog.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/update/UpdateLog.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/update/UpdateLog.java Mon Sep 17 16:01:56 2012
@@ -17,7 +17,6 @@
 
 package org.apache.solr.update;
 
-import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
@@ -63,17 +62,17 @@ public class UpdateLog implements Plugin
 
 
   public enum SyncLevel { NONE, FLUSH, FSYNC;
-	  public static SyncLevel getSyncLevel(String level){
-	    if (level == null) {
-	      return SyncLevel.FLUSH;
-	    }
-		  try{
-			  return SyncLevel.valueOf(level.toUpperCase(Locale.ROOT));
-		  } catch(Exception ex){
-		    log.warn("There was an error reading the SyncLevel - default to " + SyncLevel.FLUSH, ex);
-			  return SyncLevel.FLUSH;
-		  }
-	  }
+    public static SyncLevel getSyncLevel(String level){
+      if (level == null) {
+        return SyncLevel.FLUSH;
+      }
+      try{
+        return SyncLevel.valueOf(level.toUpperCase(Locale.ROOT));
+      } catch(Exception ex){
+        log.warn("There was an error reading the SyncLevel - default to " + SyncLevel.FLUSH, ex);
+        return SyncLevel.FLUSH;
+      }
+    }
   }
   public enum State { REPLAYING, BUFFERING, APPLYING_BUFFERED, ACTIVE }
 

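Net behavior of the reformatted parser, read straight off the code above:

    SyncLevel.getSyncLevel("fsync");  // -> FSYNC (case-insensitive via Locale.ROOT)
    SyncLevel.getSyncLevel(null);     // -> FLUSH (the default)
    SyncLevel.getSyncLevel("bogus");  // -> FLUSH, after logging a warning
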
Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/util/LongPriorityQueue.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/util/LongPriorityQueue.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/util/LongPriorityQueue.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/util/LongPriorityQueue.java Mon Sep 17 16:01:56 2012
@@ -152,10 +152,10 @@ public class LongPriorityQueue {
     time.  Only valid if size() > 0.
    */
   public long pop() {
-    long result = heap[1];	          // save first value
-    heap[1] = heap[size];	          // move last to first
+    long result = heap[1];            // save first value
+    heap[1] = heap[size];            // move last to first
     size--;
-    downHeap();				  // adjust heap
+    downHeap();          // adjust heap
     return result;
   }
   
@@ -187,11 +187,11 @@ public class LongPriorityQueue {
    */
   public long[] sort(int n) {
     while (--n >= 0) {
-      long result = heap[1];	          // save first value
-      heap[1] = heap[size];	          // move last to first
+      long result = heap[1];            // save first value
+      heap[1] = heap[size];            // move last to first
       heap[size] = result;                  // place it last
       size--;
-      downHeap();				  // adjust heap
+      downHeap();          // adjust heap
     }
     return heap;
   }
@@ -203,26 +203,26 @@ public class LongPriorityQueue {
 
   private void upHeap() {
     int i = size;
-    long node = heap[i];			  // save bottom node
+    long node = heap[i];        // save bottom node
     int j = i >>> 1;
     while (j > 0 && node < heap[j]) {
-      heap[i] = heap[j];			  // shift parents down
+      heap[i] = heap[j];        // shift parents down
       i = j;
       j = j >>> 1;
     }
-    heap[i] = node;				  // install saved node
+    heap[i] = node;          // install saved node
   }
 
   private void downHeap() {
     int i = 1;
-    long node = heap[i];			  // save top node
-    int j = i << 1;				  // find smaller child
+    long node = heap[i];        // save top node
+    int j = i << 1;          // find smaller child
     int k = j + 1;
     if (k <= size && heap[k] < heap[j]) {
       j = k;
     }
     while (j <= size && heap[j] < node) {
-      heap[i] = heap[j];			  // shift up child
+      heap[i] = heap[j];        // shift up child
       i = j;
       j = i << 1;
       k = j + 1;
@@ -230,6 +230,6 @@ public class LongPriorityQueue {
         j = k;
       }
     }
-    heap[i] = node;				  // install saved node
+    heap[i] = node;          // install saved node
   }
 }

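For readers skimming the retabbed heap code: pop() plus downHeap() is the textbook removal from a 1-based binary min-heap. A self-contained sketch of the same pattern (simplified; the real class also manages its own capacity):

    // Remove and return the smallest of heap[1..size]; the caller's size
    // shrinks by one afterwards.
    static long pop(long[] heap, int size) {
      long result = heap[1];            // save first (smallest) value
      long node = heap[size];           // last value is re-sifted from the root
      int end = size - 1;               // heap logically shrinks by one
      int i = 1, j = 2;
      while (j <= end) {
        if (j + 1 <= end && heap[j + 1] < heap[j]) j++;  // pick the smaller child
        if (heap[j] >= node) break;     // heap order restored
        heap[i] = heap[j];              // shift child up
        i = j;
        j = i << 1;
      }
      heap[i] = node;                   // install saved node
      return result;
    }
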
Modified: lucene/dev/trunk/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java (original)
+++ lucene/dev/trunk/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java Mon Sep 17 16:01:56 2012
@@ -140,7 +140,7 @@ public class TestDistributedGrouping ext
 
     commit();
 
-	  // test grouping
+    // test grouping
     // The second sort = id asc . The sorting behaviour is different in dist mode. See TopDocs#merge
     // The shard the result came from matters in the order if both document sortvalues are equal
     query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", i1 + " asc, id asc");

Modified: lucene/dev/trunk/solr/core/src/test/org/apache/solr/handler/XmlUpdateRequestHandlerTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/test/org/apache/solr/handler/XmlUpdateRequestHandlerTest.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/test/org/apache/solr/handler/XmlUpdateRequestHandlerTest.java (original)
+++ lucene/dev/trunk/solr/core/src/test/org/apache/solr/handler/XmlUpdateRequestHandlerTest.java Mon Sep 17 16:01:56 2012
@@ -112,65 +112,65 @@ public class XmlUpdateRequestHandlerTest
   
   @Test
   public void testReadDelete() throws Exception {
-	    String xml =
-	      "<update>" +
-	      " <delete>" +
-	      "   <query>id:150</query>" +
-	      "   <id>150</id>" +
-	      "   <id>200</id>" +
-	      "   <query>id:200</query>" +
-	      " </delete>" +
-	      " <delete commitWithin=\"500\">" +
-	      "   <query>id:150</query>" +
-	      " </delete>" +
-	      " <delete>" +
-	      "   <id>150</id>" +
-	      " </delete>" +
-	      "</update>";
-	    
-	    MockUpdateRequestProcessor p = new MockUpdateRequestProcessor(null);
-	    p.expectDelete(null, "id:150", -1);
-	    p.expectDelete("150", null, -1);
-	    p.expectDelete("200", null, -1);
-	    p.expectDelete(null, "id:200", -1);
-	    p.expectDelete(null, "id:150", 500);
-	    p.expectDelete("150", null, -1);
-
-	    XMLLoader loader = new XMLLoader().init(null);
-	    loader.load(req(), new SolrQueryResponse(), new ContentStreamBase.StringStream(xml), p);
-	    
-	    p.assertNoCommandsPending();
-	  }
-	  
-	  private class MockUpdateRequestProcessor extends UpdateRequestProcessor {
-	    
-	    private Queue<DeleteUpdateCommand> deleteCommands = new LinkedList<DeleteUpdateCommand>();
-	    
-	    public MockUpdateRequestProcessor(UpdateRequestProcessor next) {
-	      super(next);
-	    }
-	    
-	    public void expectDelete(String id, String query, int commitWithin) {
-	      DeleteUpdateCommand cmd = new DeleteUpdateCommand(null);
-	      cmd.id = id;
-	      cmd.query = query;
-	      cmd.commitWithin = commitWithin;
-	      deleteCommands.add(cmd);
-	    }
-	    
-	    public void assertNoCommandsPending() {
-	      assertTrue(deleteCommands.isEmpty());
-	    }
-	    
-	    @Override
-	    public void processDelete(DeleteUpdateCommand cmd) throws IOException {
-	      DeleteUpdateCommand expected = deleteCommands.poll();
-	      assertNotNull("Unexpected delete command: [" + cmd + "]", expected);
-	      assertTrue("Expected [" + expected + "] but found [" + cmd + "]",
-	          ObjectUtils.equals(expected.id, cmd.id) &&
-	          ObjectUtils.equals(expected.query, cmd.query) &&
-	          expected.commitWithin==cmd.commitWithin);
-	    }
-	  }
+      String xml =
+        "<update>" +
+        " <delete>" +
+        "   <query>id:150</query>" +
+        "   <id>150</id>" +
+        "   <id>200</id>" +
+        "   <query>id:200</query>" +
+        " </delete>" +
+        " <delete commitWithin=\"500\">" +
+        "   <query>id:150</query>" +
+        " </delete>" +
+        " <delete>" +
+        "   <id>150</id>" +
+        " </delete>" +
+        "</update>";
+
+      MockUpdateRequestProcessor p = new MockUpdateRequestProcessor(null);
+      p.expectDelete(null, "id:150", -1);
+      p.expectDelete("150", null, -1);
+      p.expectDelete("200", null, -1);
+      p.expectDelete(null, "id:200", -1);
+      p.expectDelete(null, "id:150", 500);
+      p.expectDelete("150", null, -1);
+
+      XMLLoader loader = new XMLLoader().init(null);
+      loader.load(req(), new SolrQueryResponse(), new ContentStreamBase.StringStream(xml), p);
+
+      p.assertNoCommandsPending();
+    }
+
+    private class MockUpdateRequestProcessor extends UpdateRequestProcessor {
+
+      private Queue<DeleteUpdateCommand> deleteCommands = new LinkedList<DeleteUpdateCommand>();
+
+      public MockUpdateRequestProcessor(UpdateRequestProcessor next) {
+        super(next);
+      }
+
+      public void expectDelete(String id, String query, int commitWithin) {
+        DeleteUpdateCommand cmd = new DeleteUpdateCommand(null);
+        cmd.id = id;
+        cmd.query = query;
+        cmd.commitWithin = commitWithin;
+        deleteCommands.add(cmd);
+      }
+
+      public void assertNoCommandsPending() {
+        assertTrue(deleteCommands.isEmpty());
+      }
+
+      @Override
+      public void processDelete(DeleteUpdateCommand cmd) throws IOException {
+        DeleteUpdateCommand expected = deleteCommands.poll();
+        assertNotNull("Unexpected delete command: [" + cmd + "]", expected);
+        assertTrue("Expected [" + expected + "] but found [" + cmd + "]",
+            ObjectUtils.equals(expected.id, cmd.id) &&
+            ObjectUtils.equals(expected.query, cmd.query) &&
+            expected.commitWithin==cmd.commitWithin);
+      }
+    }
 
 }

Modified: lucene/dev/trunk/solr/core/src/test/org/apache/solr/handler/XsltUpdateRequestHandlerTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/test/org/apache/solr/handler/XsltUpdateRequestHandlerTest.java?rev=1386681&r1=1386680&r2=1386681&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/test/org/apache/solr/handler/XsltUpdateRequestHandlerTest.java (original)
+++ lucene/dev/trunk/solr/core/src/test/org/apache/solr/handler/XsltUpdateRequestHandlerTest.java Mon Sep 17 16:01:56 2012
@@ -64,29 +64,29 @@ public class XsltUpdateRequestHandlerTes
       " </document>" +
       "</random>";
 
-  	Map<String,String> args = new HashMap<String, String>();
-  	args.put(CommonParams.TR, "xsl-update-handler-test.xsl");
+    Map<String,String> args = new HashMap<String, String>();
+    args.put(CommonParams.TR, "xsl-update-handler-test.xsl");
       
-  	SolrCore core = h.getCore();
-  	LocalSolrQueryRequest req = new LocalSolrQueryRequest( core, new MapSolrParams( args) );
-  	ArrayList<ContentStream> streams = new ArrayList<ContentStream>();
-  	streams.add(new ContentStreamBase.StringStream(xml));
-  	req.setContentStreams(streams);
-  	SolrQueryResponse rsp = new SolrQueryResponse();
-  	UpdateRequestHandler handler = new UpdateRequestHandler();
-  	handler.init(new NamedList<String>());
-  	handler.handleRequestBody(req, rsp);
-  	StringWriter sw = new StringWriter(32000);
-  	QueryResponseWriter responseWriter = core.getQueryResponseWriter(req);
-  	responseWriter.write(sw,req,rsp);
-  	req.close();
-  	String response = sw.toString();
-  	assertU(response);
+    SolrCore core = h.getCore();
+    LocalSolrQueryRequest req = new LocalSolrQueryRequest( core, new MapSolrParams( args) );
+    ArrayList<ContentStream> streams = new ArrayList<ContentStream>();
+    streams.add(new ContentStreamBase.StringStream(xml));
+    req.setContentStreams(streams);
+    SolrQueryResponse rsp = new SolrQueryResponse();
+    UpdateRequestHandler handler = new UpdateRequestHandler();
+    handler.init(new NamedList<String>());
+    handler.handleRequestBody(req, rsp);
+    StringWriter sw = new StringWriter(32000);
+    QueryResponseWriter responseWriter = core.getQueryResponseWriter(req);
+    responseWriter.write(sw,req,rsp);
+    req.close();
+    String response = sw.toString();
+    assertU(response);
     assertU(commit());
 
     assertQ("test document was correctly committed", req("q","*:*")
             , "//result[@numFound='1']"
             , "//int[@name='id'][.='12345']"
-    		);  
+        );
   }
 }