Posted to commits@lucene.apache.org by mi...@apache.org on 2014/03/16 20:39:37 UTC

svn commit: r1578144 [22/37] - in /lucene/dev/branches/lucene5376_2: ./ dev-tools/ dev-tools/idea/.idea/libraries/ dev-tools/idea/solr/contrib/dataimporthandler/ dev-tools/idea/solr/contrib/map-reduce/ dev-tools/idea/solr/core/src/test/ dev-tools/scrip...

Modified: lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestScriptTransformer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestScriptTransformer.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestScriptTransformer.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestScriptTransformer.java Sun Mar 16 19:39:10 2014
@@ -48,7 +48,7 @@ public class TestScriptTransformer exten
       String script = "function f1(row,context){"
               + "row.put('name','Hello ' + row.get('name'));" + "return row;\n" + "}";
       Context context = getContext("f1", script);
-      Map<String, Object> map = new HashMap<String, Object>();
+      Map<String, Object> map = new HashMap<>();
       map.put("name", "Scott");
       EntityProcessorWrapper sep = new EntityProcessorWrapper(new SqlEntityProcessor(), null, null);
       sep.init(context);
@@ -62,8 +62,8 @@ public class TestScriptTransformer exten
   }
 
   private Context getContext(String funcName, String script) {
-    List<Map<String, String>> fields = new ArrayList<Map<String, String>>();
-    Map<String, String> entity = new HashMap<String, String>();
+    List<Map<String, String>> fields = new ArrayList<>();
+    Map<String, String> entity = new HashMap<>();
     entity.put("name", "hello");
     entity.put("transformer", "script:" + funcName);
 
@@ -81,7 +81,7 @@ public class TestScriptTransformer exten
               + "row.put('name','Hello ' + row.get('name'));" + "return row;\n" + "}";
 
       Context context = getContext("f1", script);
-      Map<String, Object> map = new HashMap<String, Object>();
+      Map<String, Object> map = new HashMap<>();
       map.put("name", "Scott");
       EntityProcessorWrapper sep = new EntityProcessorWrapper(new SqlEntityProcessor(), null, null);
       sep.init(context);

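Most hunks in this commit apply the same mechanical cleanup: constructor calls that repeat their type arguments are rewritten to use the Java 7 diamond operator, which infers the arguments from the declared type. A minimal before/after illustration (a hypothetical class, not part of this commit):

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    class DiamondExample {
      // Pre-Java 7: type arguments are spelled out twice.
      Map<String, List<Integer>> verbose = new HashMap<String, List<Integer>>();

      // Java 7 diamond: the compiler infers the constructor's type
      // arguments from the declared type; the bytecode is identical.
      Map<String, List<Integer>> concise = new HashMap<>();
    }
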
Modified: lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSimplePropertiesWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSimplePropertiesWriter.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSimplePropertiesWriter.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSimplePropertiesWriter.java Sun Mar 16 19:39:10 2014
@@ -84,13 +84,13 @@ public class TestSimplePropertiesWriter 
       SimpleDateFormat df = new SimpleDateFormat(dateFormat, Locale.ROOT);
       Date oneSecondAgo = new Date(System.currentTimeMillis() - 1000);
       
-      Map<String,String> init = new HashMap<String,String>();
+      Map<String,String> init = new HashMap<>();
       init.put("dateFormat", dateFormat);
       init.put("filename", fileName);
       init.put("directory", fileLocation);
       SimplePropertiesWriter spw = new SimplePropertiesWriter();
       spw.init(new DataImporter(), init);
-      Map<String, Object> props = new HashMap<String,Object>();
+      Map<String, Object> props = new HashMap<>();
       props.put("SomeDates.last_index_time", oneSecondAgo);
       props.put("last_index_time", oneSecondAgo);
       spw.persist(props);

Modified: lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java Sun Mar 16 19:39:10 2014
@@ -53,17 +53,17 @@ public class TestSolrEntityProcessorEndT
 
   private static final String DEAD_SOLR_SERVER = "http://[ff01::114]:33332/solr";
   
-  private static final List<Map<String,Object>> DB_DOCS = new ArrayList<Map<String,Object>>();
-  private static final List<Map<String,Object>> SOLR_DOCS = new ArrayList<Map<String,Object>>();
+  private static final List<Map<String,Object>> DB_DOCS = new ArrayList<>();
+  private static final List<Map<String,Object>> SOLR_DOCS = new ArrayList<>();
   
   static {
     // dynamic fields in the destination schema
-    Map<String,Object> dbDoc = new HashMap<String,Object>();
+    Map<String,Object> dbDoc = new HashMap<>();
     dbDoc.put("dbid_s", "1");
     dbDoc.put("dbdesc_s", "DbDescription");
     DB_DOCS.add(dbDoc);
 
-    Map<String,Object> solrDoc = new HashMap<String,Object>();
+    Map<String,Object> solrDoc = new HashMap<>();
     solrDoc.put("id", "1");
     solrDoc.put("desc", "SolrDescription");
     SOLR_DOCS.add(solrDoc);
@@ -161,7 +161,7 @@ public class TestSolrEntityProcessorEndT
     
     try {
       addDocumentsToSolr(generateSolrDocuments(30));
-      Map<String,String> map = new HashMap<String,String>();
+      Map<String,String> map = new HashMap<>();
       map.put("rows", "50");
       runFullImport(generateDIHConfig("query='*:*' fq='desc:Description1*,desc:Description*2' rows='2'", false), map);
     } catch (Exception e) {
@@ -202,15 +202,15 @@ public class TestSolrEntityProcessorEndT
     assertQ(req("*:*"), "//result[@numFound='0']");
     
     try {
-      List<Map<String,Object>> DOCS = new ArrayList<Map<String,Object>>(DB_DOCS);
-      Map<String, Object> doc = new HashMap<String, Object>();
+      List<Map<String,Object>> DOCS = new ArrayList<>(DB_DOCS);
+      Map<String, Object> doc = new HashMap<>();
       doc.put("dbid_s", "2");
       doc.put("dbdesc_s", "DbDescription2");
       DOCS.add(doc);
       MockDataSource.setIterator("select * from x", DOCS.iterator());
 
-      DOCS = new ArrayList<Map<String,Object>>(SOLR_DOCS);
-      Map<String,Object> solrDoc = new HashMap<String,Object>();
+      DOCS = new ArrayList<>(SOLR_DOCS);
+      Map<String,Object> solrDoc = new HashMap<>();
       solrDoc.put("id", "2");
       solrDoc.put("desc", "SolrDescription2");
       DOCS.add(solrDoc);
@@ -261,9 +261,9 @@ public class TestSolrEntityProcessorEndT
   }
     
   private static List<Map<String,Object>> generateSolrDocuments(int num) {
-    List<Map<String,Object>> docList = new ArrayList<Map<String,Object>>();
+    List<Map<String,Object>> docList = new ArrayList<>();
     for (int i = 1; i <= num; i++) {
-      Map<String,Object> map = new HashMap<String,Object>();
+      Map<String,Object> map = new HashMap<>();
       map.put("id", i);
       map.put("desc", "Description" + i);
       docList.add(map);
@@ -272,7 +272,7 @@ public class TestSolrEntityProcessorEndT
   }
   
   private void addDocumentsToSolr(List<Map<String,Object>> docs) throws SolrServerException, IOException {
-    List<SolrInputDocument> sidl = new ArrayList<SolrInputDocument>();
+    List<SolrInputDocument> sidl = new ArrayList<>();
     for (Map<String,Object> doc : docs) {
       SolrInputDocument sd = new SolrInputDocument();
       for (Entry<String,Object> entry : doc.entrySet()) {

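The addDocumentsToSolr helper above builds SolrJ documents out of plain field maps before indexing them in the test server. A minimal sketch of that conversion step, assuming SolrJ's SolrInputDocument.addField(String, Object):

    import java.util.Map;
    import org.apache.solr.common.SolrInputDocument;

    class DocConversionSketch {
      // Mirrors the loop in addDocumentsToSolr: one addField call
      // per map entry, field name as key, field value as value.
      static SolrInputDocument toDoc(Map<String, Object> fields) {
        SolrInputDocument sd = new SolrInputDocument();
        for (Map.Entry<String, Object> entry : fields.entrySet()) {
          sd.addField(entry.getKey(), entry.getValue());
        }
        return sd;
      }
    }
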
Modified: lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorUnit.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorUnit.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorUnit.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorUnit.java Sun Mar 16 19:39:10 2014
@@ -46,8 +46,8 @@ public class TestSolrEntityProcessorUnit
   }
 
   public void testMultiValuedFields() {
-    List<Doc> docs = new ArrayList<Doc>();
-    List<FldType> types = new ArrayList<FldType>();
+    List<Doc> docs = new ArrayList<>();
+    List<FldType> types = new ArrayList<>();
     types.add(new FldType(ID, ONE_ONE, new SVal('A', 'Z', 4, 4)));
     types.add(new FldType("description", new IRange(3, 3), new SVal('a', 'c', 1, 1)));
     Doc testDoc = createDoc(types);
@@ -66,12 +66,12 @@ public class TestSolrEntityProcessorUnit
   }
 
   private List<Doc> generateUniqueDocs(int numDocs) {
-    List<FldType> types = new ArrayList<FldType>();
+    List<FldType> types = new ArrayList<>();
     types.add(new FldType(ID, ONE_ONE, new SVal('A', 'Z', 4, 40)));
     types.add(new FldType("description", new IRange(1, 3), new SVal('a', 'c', 1, 1)));
 
-    Set<Comparable> previousIds = new HashSet<Comparable>();
-    List<Doc> docs = new ArrayList<Doc>(numDocs);
+    Set<Comparable> previousIds = new HashSet<>();
+    List<Doc> docs = new ArrayList<>(numDocs);
     for (int i = 0; i < numDocs; i++) {
       Doc doc = createDoc(types);
       while (previousIds.contains(doc.id)) {

Modified: lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java Sun Mar 16 19:39:10 2014
@@ -79,11 +79,11 @@ public class TestSortedMapBackedCache ex
     DIHCache cache = null;
     try {
       cache = new SortedMapBackedCache();
-      Map<String, String> cacheProps = new HashMap<String, String>();
+      Map<String, String> cacheProps = new HashMap<>();
       cacheProps.put(DIHCacheSupport.CACHE_PRIMARY_KEY, "a_id");
       cache.open(getContext(cacheProps));
       
-      Map<String,Object> data = new HashMap<String,Object>();
+      Map<String,Object> data = new HashMap<>();
       data.put("a_id", null);
       data.put("bogus", "data");
       cache.add(data);
@@ -108,7 +108,7 @@ public class TestSortedMapBackedCache ex
   public void testCacheReopensWithUpdate() {
     DIHCache cache = null;
     try {      
-      Map<String, String> cacheProps = new HashMap<String, String>();
+      Map<String, String> cacheProps = new HashMap<>();
       cacheProps.put(DIHCacheSupport.CACHE_PRIMARY_KEY, "a_id");
       
       cache = new SortedMapBackedCache();
@@ -120,7 +120,7 @@ public class TestSortedMapBackedCache ex
       // Close the cache.
       cache.close();
 
-      List<ControlData> newControlData = new ArrayList<ControlData>();
+      List<ControlData> newControlData = new ArrayList<>();
       Object[] newIdEqualsThree = null;
       int j = 0;
       for (int i = 0; i < data.size(); i++) {

Modified: lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestURLDataSource.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestURLDataSource.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestURLDataSource.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestURLDataSource.java Sun Mar 16 19:39:10 2014
@@ -25,7 +25,7 @@ import java.util.Properties;
 import org.junit.Test;
 
 public class TestURLDataSource extends AbstractDataImportHandlerTestCase {
-  private List<Map<String, String>> fields = new ArrayList<Map<String, String>>();
+  private List<Map<String, String>> fields = new ArrayList<>();
   private URLDataSource dataSource = new URLDataSource();
   private VariableResolver variableResolver = new VariableResolver();
   private Context context = AbstractDataImportHandlerTestCase.getContext(null, variableResolver,

Modified: lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestVariableResolver.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestVariableResolver.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestVariableResolver.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestVariableResolver.java Sun Mar 16 19:39:10 2014
@@ -36,7 +36,7 @@ public class TestVariableResolver extend
   @Test
   public void testSimpleNamespace() {
     VariableResolver vri = new VariableResolver();
-    Map<String,Object> ns = new HashMap<String,Object>();
+    Map<String,Object> ns = new HashMap<>();
     ns.put("world", "WORLD");
     vri.addNamespace("hello", ns);
     assertEquals("WORLD", vri.resolve("hello.world"));
@@ -61,10 +61,10 @@ public class TestVariableResolver extend
   @Test
   public void testNestedNamespace() {
     VariableResolver vri = new VariableResolver();
-    Map<String,Object> ns = new HashMap<String,Object>();
+    Map<String,Object> ns = new HashMap<>();
     ns.put("world", "WORLD");
     vri.addNamespace("hello", ns);
-    ns = new HashMap<String,Object>();
+    ns = new HashMap<>();
     ns.put("world1", "WORLD1");
     vri.addNamespace("hello.my", ns);
     assertEquals("WORLD1", vri.resolve("hello.my.world1"));
@@ -73,10 +73,10 @@ public class TestVariableResolver extend
   @Test
   public void test3LevelNestedNamespace() {
     VariableResolver vri = new VariableResolver();
-    Map<String,Object> ns = new HashMap<String,Object>();
+    Map<String,Object> ns = new HashMap<>();
     ns.put("world", "WORLD");
     vri.addNamespace("hello", ns);
-    ns = new HashMap<String,Object>();
+    ns = new HashMap<>();
     ns.put("world1", "WORLD1");
     vri.addNamespace("hello.my.new", ns);
     assertEquals("WORLD1", vri.resolve("hello.my.new.world1"));
@@ -87,7 +87,7 @@ public class TestVariableResolver extend
     VariableResolver vri = new VariableResolver();
     vri.setEvaluators(new DataImporter().getEvaluators(Collections
         .<Map<String,String>> emptyList()));
-    Map<String,Object> ns = new HashMap<String,Object>();
+    Map<String,Object> ns = new HashMap<>();
     Date d = new Date();
     ns.put("dt", d);
     vri.addNamespace("A", ns);
@@ -115,7 +115,7 @@ public class TestVariableResolver extend
   @Test
   public void testDefaultNamespace() {
     VariableResolver vri = new VariableResolver();
-    Map<String,Object> ns = new HashMap<String,Object>();
+    Map<String,Object> ns = new HashMap<>();
     ns.put("world", "WORLD");
     vri.addNamespace(null, ns);
     assertEquals("WORLD", vri.resolve("world"));
@@ -124,7 +124,7 @@ public class TestVariableResolver extend
   @Test
   public void testDefaultNamespace1() {
     VariableResolver vri = new VariableResolver();
-    Map<String,Object> ns = new HashMap<String,Object>();
+    Map<String,Object> ns = new HashMap<>();
     ns.put("world", "WORLD");
     vri.addNamespace(null, ns);
     assertEquals("WORLD", vri.resolve("world"));
@@ -133,8 +133,8 @@ public class TestVariableResolver extend
   @Test
   public void testFunctionNamespace1() throws Exception {
     VariableResolver resolver = new VariableResolver();
-    final List<Map<String,String>> l = new ArrayList<Map<String,String>>();
-    Map<String,String> m = new HashMap<String,String>();
+    final List<Map<String,String>> l = new ArrayList<>();
+    Map<String,String> m = new HashMap<>();
     m.put("name", "test");
     m.put("class", E.class.getName());
     l.add(m);

Modified: lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java Sun Mar 16 19:39:10 2014
@@ -57,7 +57,7 @@ public class TestXPathEntityProcessor ex
             new VariableResolver(), getDataSource(cdData), Context.FULL_DUMP, fields, entityAttrs);
     XPathEntityProcessor xPathEntityProcessor = new XPathEntityProcessor();
     xPathEntityProcessor.init(c);
-    List<Map<String, Object>> result = new ArrayList<Map<String, Object>>();
+    List<Map<String, Object>> result = new ArrayList<>();
     while (true) {
       Map<String, Object> row = xPathEntityProcessor.nextRow();
       if (row == null)
@@ -80,7 +80,7 @@ public class TestXPathEntityProcessor ex
             new VariableResolver(), getDataSource(testXml), Context.FULL_DUMP, fields, entityAttrs);
     XPathEntityProcessor xPathEntityProcessor = new XPathEntityProcessor();
     xPathEntityProcessor.init(c);
-    List<Map<String, Object>> result = new ArrayList<Map<String, Object>>();
+    List<Map<String, Object>> result = new ArrayList<>();
     while (true) {
       Map<String, Object> row = xPathEntityProcessor.nextRow();
       if (row == null)
@@ -109,7 +109,7 @@ public class TestXPathEntityProcessor ex
             new VariableResolver(), getDataSource(textMultipleDocuments), Context.FULL_DUMP, fields, entityAttrs);
     XPathEntityProcessor xPathEntityProcessor = new XPathEntityProcessor();
     xPathEntityProcessor.init(c);
-    List<Map<String, Object>> result = new ArrayList<Map<String, Object>>();
+    List<Map<String, Object>> result = new ArrayList<>();
     while (true) {
       Map<String, Object> row = xPathEntityProcessor.nextRow();
       if (row == null)
@@ -276,7 +276,7 @@ public class TestXPathEntityProcessor ex
     xPathEntityProcessor.blockingQueueTimeOutUnits = TimeUnit.MICROSECONDS;
     
     xPathEntityProcessor.init(c);
-    List<Map<String, Object>> result = new ArrayList<Map<String, Object>>();
+    List<Map<String, Object>> result = new ArrayList<>();
     while (true) {
       if (rowsToRead >= 0 && result.size() >= rowsToRead) {
         Thread.currentThread().interrupt();
@@ -346,7 +346,7 @@ public class TestXPathEntityProcessor ex
             new VariableResolver(), getDataSource(cdData), Context.FULL_DUMP, null, entityAttrs);
     XPathEntityProcessor xPathEntityProcessor = new XPathEntityProcessor();
     xPathEntityProcessor.init(c);
-    List<Map<String, Object>> result = new ArrayList<Map<String, Object>>();
+    List<Map<String, Object>> result = new ArrayList<>();
     while (true) {
       Map<String, Object> row = xPathEntityProcessor.nextRow();
       if (row == null)

Modified: lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathRecordReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathRecordReader.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathRecordReader.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathRecordReader.java Sun Mar 16 19:39:10 2014
@@ -136,8 +136,8 @@ public class TestXPathRecordReader exten
     rr.addField("a", "/root/x/b/@a", false);
     rr.addField("b", "/root/x/b/@b", false);
 
-    final List<Map<String, Object>> a = new ArrayList<Map<String, Object>>();
-    final List<Map<String, Object>> x = new ArrayList<Map<String, Object>>();
+    final List<Map<String, Object>> a = new ArrayList<>();
+    final List<Map<String, Object>> x = new ArrayList<>();
     rr.streamRecords(new StringReader(xml), new XPathRecordReader.Handler() {
       @Override
       public void handle(Map<String, Object> record, String xpath) {

Modified: lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestZKPropertiesWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestZKPropertiesWriter.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestZKPropertiesWriter.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestZKPropertiesWriter.java Sun Mar 16 19:39:10 2014
@@ -106,11 +106,11 @@ public class TestZKPropertiesWriter exte
     SimpleDateFormat df = new SimpleDateFormat(dateFormat, Locale.ROOT);
     Date oneSecondAgo = new Date(System.currentTimeMillis() - 1000);
 
-    Map<String, String> init = new HashMap<String, String>();
+    Map<String, String> init = new HashMap<>();
     init.put("dateFormat", dateFormat);
     ZKPropertiesWriter spw = new ZKPropertiesWriter();
     spw.init(new DataImporter(h.getCore(), "dataimport"), init);
-    Map<String, Object> props = new HashMap<String, Object>();
+    Map<String, Object> props = new HashMap<>();
     props.put("SomeDates.last_index_time", oneSecondAgo);
     props.put("last_index_time", oneSecondAgo);
     spw.persist(props);

Modified: lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TripleThreatTransformer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TripleThreatTransformer.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TripleThreatTransformer.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TripleThreatTransformer.java Sun Mar 16 19:39:10 2014
@@ -36,17 +36,17 @@ import java.util.Map;
  */
 public class TripleThreatTransformer {
   public Object transformRow(Map<String, Object> row) {
-    List<Map<String, Object>> rows = new ArrayList<Map<String, Object>>(3);
+    List<Map<String, Object>> rows = new ArrayList<>(3);
     rows.add(row);
     rows.add(addDuplicateBackwardsValues(row));
-    rows.add(new LinkedHashMap<String,Object>(row));
+    rows.add(new LinkedHashMap<>(row));
     rows.get(2).put("AddAColumn_s", "Added");
     modifyIdColumn(rows.get(1), 1);
     modifyIdColumn(rows.get(2), 2);
     return rows;
   }
   private LinkedHashMap<String,Object> addDuplicateBackwardsValues(Map<String, Object> row) {
-    LinkedHashMap<String,Object> n = new LinkedHashMap<String,Object>();
+    LinkedHashMap<String,Object> n = new LinkedHashMap<>();
     for(Map.Entry<String,Object> entry : row.entrySet()) {
       String key = entry.getKey();
       if(!"id".equalsIgnoreCase(key)) {

Modified: lucene/dev/branches/lucene5376_2/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingRequestHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingRequestHandler.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingRequestHandler.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingRequestHandler.java Sun Mar 16 19:39:10 2014
@@ -82,7 +82,7 @@ public class ExtractingRequestHandler ex
       }
       NamedList configDateFormats = (NamedList) initArgs.get(DATE_FORMATS);
       if (configDateFormats != null && configDateFormats.size() > 0) {
-        dateFormats = new HashSet<String>();
+        dateFormats = new HashSet<>();
         Iterator<Map.Entry> it = configDateFormats.iterator();
         while (it.hasNext()) {
           String format = (String) it.next().getValue();

Modified: lucene/dev/branches/lucene5376_2/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/RegexRulesPasswordProvider.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/RegexRulesPasswordProvider.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/RegexRulesPasswordProvider.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/RegexRulesPasswordProvider.java Sun Mar 16 19:39:10 2014
@@ -41,7 +41,7 @@ import org.slf4j.LoggerFactory;
 public class RegexRulesPasswordProvider implements PasswordProvider {
   private static final Logger log = LoggerFactory.getLogger(RegexRulesPasswordProvider.class);
   
-  private LinkedHashMap<Pattern,String> passwordMap = new LinkedHashMap<Pattern,String>(); 
+  private LinkedHashMap<Pattern,String> passwordMap = new LinkedHashMap<>();
   private String explicitPassword; 
   
   @Override
@@ -72,7 +72,7 @@ public class RegexRulesPasswordProvider 
    * @param is input stream for the file
    */
   public static LinkedHashMap<Pattern,String> parseRulesFile(InputStream is) {
-    LinkedHashMap<Pattern,String> rules = new LinkedHashMap<Pattern,String>();
+    LinkedHashMap<Pattern,String> rules = new LinkedHashMap<>();
     BufferedReader br = new BufferedReader(IOUtils.getDecodingReader(is, IOUtils.CHARSET_UTF_8));
     String line;
     try {

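parseRulesFile above collects pattern/password pairs into a LinkedHashMap, so iteration order matches the order of rules in the file. A hypothetical first-match lookup over such a map (the provider's actual matching code is outside this hunk):

    import java.util.LinkedHashMap;
    import java.util.Map;
    import java.util.regex.Pattern;

    class FirstMatchSketch {
      // Returns the password of the first rule whose pattern matches
      // the file name, or null when no rule applies.
      static String lookup(LinkedHashMap<Pattern, String> rules, String fileName) {
        for (Map.Entry<Pattern, String> entry : rules.entrySet()) {
          if (entry.getKey().matcher(fileName).matches()) {
            return entry.getValue();
          }
        }
        return null;
      }
    }
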
Modified: lucene/dev/branches/lucene5376_2/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/SolrContentHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/SolrContentHandler.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/SolrContentHandler.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/SolrContentHandler.java Sun Mar 16 19:39:10 2014
@@ -57,7 +57,7 @@ public class SolrContentHandler extends 
   protected StringBuilder catchAllBuilder = new StringBuilder(2048);
   protected IndexSchema schema;
   protected Map<String, StringBuilder> fieldBuilders = Collections.emptyMap();
-  private LinkedList<StringBuilder> bldrStack = new LinkedList<StringBuilder>();
+  private LinkedList<StringBuilder> bldrStack = new LinkedList<>();
 
   protected boolean captureAttribs;
   protected boolean lowerNames;
@@ -89,7 +89,7 @@ public class SolrContentHandler extends 
     this.defaultField = params.get(DEFAULT_FIELD, "");
     String[] captureFields = params.getParams(CAPTURE_ELEMENTS);
     if (captureFields != null && captureFields.length > 0) {
-      fieldBuilders = new HashMap<String, StringBuilder>();
+      fieldBuilders = new HashMap<>();
       for (int i = 0; i < captureFields.length; i++) {
         fieldBuilders.put(captureFields[i], new StringBuilder());
       }
@@ -158,7 +158,7 @@ public class SolrContentHandler extends 
    */
   protected void addLiterals() {
     Iterator<String> paramNames = params.getParameterNamesIterator();
-    literalFieldNames = new HashSet<String>();
+    literalFieldNames = new HashSet<>();
     while (paramNames.hasNext()) {
       String pname = paramNames.next();
       if (!pname.startsWith(LITERALS_PREFIX)) continue;

Modified: lucene/dev/branches/lucene5376_2/solr/contrib/extraction/src/test/org/apache/solr/handler/extraction/ExtractingRequestHandlerTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/extraction/src/test/org/apache/solr/handler/extraction/ExtractingRequestHandlerTest.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/extraction/src/test/org/apache/solr/handler/extraction/ExtractingRequestHandlerTest.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/extraction/src/test/org/apache/solr/handler/extraction/ExtractingRequestHandlerTest.java Sun Mar 16 19:39:10 2014
@@ -616,7 +616,7 @@ public class ExtractingRequestHandlerTes
     try {
       // TODO: stop using locally defined streams once stream.file and
       // stream.body work everywhere
-      List<ContentStream> cs = new ArrayList<ContentStream>();
+      List<ContentStream> cs = new ArrayList<>();
       cs.add(new ContentStreamBase.FileStream(getFile(filename)));
       req.setContentStreams(cs);
       return h.queryAndResponse("/update/extract", req);

Modified: lucene/dev/branches/lucene5376_2/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessor.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessor.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessor.java Sun Mar 16 19:39:10 2014
@@ -53,7 +53,7 @@ public class LangDetectLanguageIdentifie
       Detector detector = DetectorFactory.create();
       detector.append(content);
       ArrayList<Language> langlist = detector.getProbabilities();
-      ArrayList<DetectedLanguage> solrLangList = new ArrayList<DetectedLanguage>();
+      ArrayList<DetectedLanguage> solrLangList = new ArrayList<>();
       for (Language l: langlist) {
         solrLangList.add(new DetectedLanguage(l.lang, l.prob));
       }

Modified: lucene/dev/branches/lucene5376_2/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessorFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessorFactory.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessorFactory.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessorFactory.java Sun Mar 16 19:39:10 2014
@@ -125,7 +125,7 @@ public class LangDetectLanguageIdentifie
       return;
     }
     loaded = true;
-    List<String> profileData = new ArrayList<String>();
+    List<String> profileData = new ArrayList<>();
     Charset encoding = Charset.forName("UTF-8");
     for (String language : languages) {
       InputStream stream = LangDetectLanguageIdentifierUpdateProcessor.class.getResourceAsStream("langdetect-profiles/" + language);

Modified: lucene/dev/branches/lucene5376_2/solr/contrib/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java Sun Mar 16 19:39:10 2014
@@ -107,7 +107,7 @@ public abstract class LanguageIdentifier
         fallbackFields = params.get(FALLBACK_FIELDS).split(",");
       }
       overwrite = params.getBool(OVERWRITE, false);
-      langWhitelist = new HashSet<String>();
+      langWhitelist = new HashSet<>();
       threshold = params.getDouble(THRESHOLD, DOCID_THRESHOLD_DEFAULT);
       if(params.get(LANG_WHITELIST, "").length() > 0) {
         for(String lang : params.get(LANG_WHITELIST, "").split(",")) {
@@ -133,15 +133,15 @@ public abstract class LanguageIdentifier
       } else {
         mapIndividualFields = mapFields;
       }
-      mapIndividualFieldsSet = new HashSet<String>(Arrays.asList(mapIndividualFields));
+      mapIndividualFieldsSet = new HashSet<>(Arrays.asList(mapIndividualFields));
       // Compile a union of the lists of fields to map
-      allMapFieldsSet = new HashSet<String>(Arrays.asList(mapFields));
+      allMapFieldsSet = new HashSet<>(Arrays.asList(mapFields));
       if(Arrays.equals(mapFields, mapIndividualFields)) {
         allMapFieldsSet.addAll(mapIndividualFieldsSet);
       }
 
       // Normalize detected langcode onto normalized langcode
-      lcMap = new HashMap<String,String>();
+      lcMap = new HashMap<>();
       if(params.get(LCMAP) != null) {
         for(String mapping : params.get(LCMAP).split("[, ]")) {
           String[] keyVal = mapping.split(":");
@@ -154,7 +154,7 @@ public abstract class LanguageIdentifier
       }
 
       // Language Code mapping
-      mapLcMap = new HashMap<String,String>();
+      mapLcMap = new HashMap<>();
       if(params.get(MAP_LCMAP) != null) {
         for(String mapping : params.get(MAP_LCMAP).split("[, ]")) {
           String[] keyVal = mapping.split(":");
@@ -199,7 +199,7 @@ public abstract class LanguageIdentifier
    */
   protected SolrInputDocument process(SolrInputDocument doc) {
     String docLang = null;
-    HashSet<String> docLangs = new HashSet<String>();
+    HashSet<String> docLangs = new HashSet<>();
     String fallbackLang = getFallbackLang(doc, fallbackFields, fallbackValue);
 
     if(langField == null || !doc.containsKey(langField) || (doc.containsKey(langField) && overwrite)) {
@@ -323,7 +323,7 @@ public abstract class LanguageIdentifier
    * @return a string of the chosen language
    */
   protected String resolveLanguage(String language, String fallbackLang) {
-    List<DetectedLanguage> l = new ArrayList<DetectedLanguage>();
+    List<DetectedLanguage> l = new ArrayList<>();
     l.add(new DetectedLanguage(language, 1.0));
     return resolveLanguage(l, fallbackLang);
   }

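The lcMap and mapLcMap hunks above both fill a normalization map from a request parameter holding from:to pairs separated by commas or spaces. The parsing in isolation, assuming well-formed input:

    import java.util.HashMap;
    import java.util.Map;

    class LangCodeMapSketch {
      // Parses e.g. "zh_cn:zh,zh_tw:zh no:nb" into {zh_cn=zh, zh_tw=zh, no=nb};
      // pairs that do not split into exactly two parts are skipped.
      static Map<String, String> parse(String param) {
        Map<String, String> lcMap = new HashMap<>();
        for (String mapping : param.split("[, ]")) {
          String[] keyVal = mapping.split(":");
          if (keyVal.length == 2) {
            lcMap.put(keyVal[0], keyVal[1]);
          }
        }
        return lcMap;
      }
    }
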
Modified: lucene/dev/branches/lucene5376_2/solr/contrib/langid/src/java/org/apache/solr/update/processor/TikaLanguageIdentifierUpdateProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/langid/src/java/org/apache/solr/update/processor/TikaLanguageIdentifierUpdateProcessor.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/langid/src/java/org/apache/solr/update/processor/TikaLanguageIdentifierUpdateProcessor.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/langid/src/java/org/apache/solr/update/processor/TikaLanguageIdentifierUpdateProcessor.java Sun Mar 16 19:39:10 2014
@@ -41,7 +41,7 @@ public class TikaLanguageIdentifierUpdat
   
   @Override
   protected List<DetectedLanguage> detectLanguage(String content) {
-    List<DetectedLanguage> languages = new ArrayList<DetectedLanguage>();
+    List<DetectedLanguage> languages = new ArrayList<>();
     if(content.trim().length() != 0) { 
       LanguageIdentifier identifier = new LanguageIdentifier(content);
       // FIXME: Hack - we get the distance from toString and calculate our own certainty score

Modified: lucene/dev/branches/lucene5376_2/solr/contrib/langid/src/test/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessorFactoryTestCase.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/langid/src/test/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessorFactoryTestCase.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/langid/src/test/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessorFactoryTestCase.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/langid/src/test/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessorFactoryTestCase.java Sun Mar 16 19:39:10 2014
@@ -124,7 +124,7 @@ public abstract class LanguageIdentifier
     assertEquals("zh", liProcessor.resolveLanguage("zh_cn", "NA"));
     assertEquals("zh", liProcessor.resolveLanguage("zh_tw", "NA"));
     assertEquals("no", liProcessor.resolveLanguage("no", "NA"));
-    List<DetectedLanguage> langs = new ArrayList<DetectedLanguage>();
+    List<DetectedLanguage> langs = new ArrayList<>();
     langs.add(new DetectedLanguage("zh_cn", 0.8));
     assertEquals("zh", liProcessor.resolveLanguage(langs, "NA"));
   }
@@ -246,7 +246,7 @@ public abstract class LanguageIdentifier
     liProcessor = createLangIdProcessor(parameters);
 
     // No detected languages
-    langs = new ArrayList<DetectedLanguage>();
+    langs = new ArrayList<>();
     assertEquals("", liProcessor.resolveLanguage(langs, null));
     assertEquals("fallback", liProcessor.resolveLanguage(langs, "fallback"));
 
@@ -255,7 +255,7 @@ public abstract class LanguageIdentifier
     assertEquals("one", liProcessor.resolveLanguage(langs, "fallback"));    
 
     // One detected language under default threshold
-    langs = new ArrayList<DetectedLanguage>();
+    langs = new ArrayList<>();
     langs.add(new DetectedLanguage("under", 0.1));
     assertEquals("fallback", liProcessor.resolveLanguage(langs, "fallback"));    
   }

Modified: lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/BatchWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/BatchWriter.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/BatchWriter.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/BatchWriter.java Sun Mar 16 19:39:10 2014
@@ -81,7 +81,7 @@ class BatchWriter {
     private UpdateResponse result;
 
     public Batch(Collection<SolrInputDocument> batch) {
-      documents = new ArrayList<SolrInputDocument>(batch);
+      documents = new ArrayList<>(batch);
     }
 
     public void run() {
@@ -111,7 +111,7 @@ class BatchWriter {
 
     protected void reset(List<SolrInputDocument> documents) {
       if (this.documents == null) {
-        this.documents = new ArrayList<SolrInputDocument>(documents);
+        this.documents = new ArrayList<>(documents);
       } else {
         this.documents.clear();
         this.documents.addAll(documents);
@@ -121,7 +121,7 @@ class BatchWriter {
 
     protected void reset(SolrInputDocument document) {
       if (this.documents == null) {
-        this.documents = new ArrayList<SolrInputDocument>();
+        this.documents = new ArrayList<>();
       } else {
         this.documents.clear();
       }
@@ -201,10 +201,10 @@ class BatchWriter {
     context.setStatus("Optimizing Solr");
     int maxSegments = context.getConfiguration().getInt(SolrOutputFormat.SOLR_RECORD_WRITER_MAX_SEGMENTS, 1);
     LOG.info("Optimizing Solr: forcing merge down to {} segments", maxSegments);
-    long start = System.currentTimeMillis();
+    long start = System.nanoTime();
     solr.optimize(true, false, maxSegments);
-    context.getCounter(SolrCounters.class.getName(), SolrCounters.PHYSICAL_REDUCER_MERGE_TIME.toString()).increment(System.currentTimeMillis() - start);
-    float secs = (System.currentTimeMillis() - start) / 1000.0f;
+    context.getCounter(SolrCounters.class.getName(), SolrCounters.PHYSICAL_REDUCER_MERGE_TIME.toString()).increment(System.nanoTime() - start);
+    float secs = (System.nanoTime() - start) / 1e9f;
     LOG.info("Optimizing Solr: done forcing merge down to {} segments in {} secs", maxSegments, secs);
     context.setStatus("Committing Solr Phase 2");
     solr.commit(true, false);

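The timing hunks here and in the classes below move elapsed-time measurement from System.currentTimeMillis(), which can jump when the wall clock is adjusted, to the monotonic System.nanoTime(). One pitfall in the seconds conversion: the caret is bitwise XOR in Java, so an expression like 10^9 evaluates to 3, not a billion; the divisor therefore has to be the float literal 1e9f. The pattern in isolation:

    import java.util.concurrent.TimeUnit;

    class ElapsedTimeSketch {
      static void timedWork() throws InterruptedException {
        long start = System.nanoTime();    // monotonic; only meaningful for intervals
        Thread.sleep(250);                 // stand-in for the real work
        long elapsedNanos = System.nanoTime() - start;
        float secs = elapsedNanos / 1e9f;  // NOT 10^9, which is XOR and equals 3
        System.out.println("took " + secs + " secs ("
            + TimeUnit.NANOSECONDS.toMillis(elapsedNanos) + " ms)");
      }
    }
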
Modified: lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/GoLive.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/GoLive.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/GoLive.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/GoLive.java Sun Mar 16 19:39:10 2014
@@ -52,15 +52,15 @@ class GoLive {
   public boolean goLive(Options options, FileStatus[] outDirs) {
     LOG.info("Live merging of output shards into Solr cluster...");
     boolean success = false;
-    long start = System.currentTimeMillis();
+    long start = System.nanoTime();
     int concurrentMerges = options.goLiveThreads;
     ThreadPoolExecutor executor = new ThreadPoolExecutor(concurrentMerges,
         concurrentMerges, 1, TimeUnit.SECONDS,
         new LinkedBlockingQueue<Runnable>());
     
     try {
-      CompletionService<Request> completionService = new ExecutorCompletionService<Request>(executor);
-      Set<Future<Request>> pending = new HashSet<Future<Request>>();
+      CompletionService<Request> completionService = new ExecutorCompletionService<>(executor);
+      Set<Future<Request>> pending = new HashSet<>();
       int cnt = -1;
       for (final FileStatus dir : outDirs) {
         
@@ -173,7 +173,7 @@ class GoLive {
       return true;
     } finally {
       shutdownNowAndAwaitTermination(executor);
-      float secs = (System.currentTimeMillis() - start) / 1000.0f;
+      float secs = (System.nanoTime() - start) / 1e9f;
       LOG.info("Live merging of index shards into Solr cluster took " + secs + " secs");
       if (success) {
         LOG.info("Live merging completed successfully");

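Beyond the diamond cleanup, the goLive hunk shows the completion-service pattern used for the shard merges: tasks go into a fixed-size pool and results are harvested in completion order rather than submission order. A self-contained sketch with hypothetical String results standing in for the Request type:

    import java.util.HashSet;
    import java.util.Set;
    import java.util.concurrent.Callable;
    import java.util.concurrent.CompletionService;
    import java.util.concurrent.ExecutorCompletionService;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;

    class CompletionServiceSketch {
      static void runAll() throws Exception {
        ExecutorService executor = Executors.newFixedThreadPool(4);
        CompletionService<String> completionService = new ExecutorCompletionService<>(executor);
        Set<Future<String>> pending = new HashSet<>();
        for (int i = 0; i < 8; i++) {
          final int id = i;
          pending.add(completionService.submit(new Callable<String>() {
            @Override
            public String call() {
              return "merged shard " + id;
            }
          }));
        }
        while (!pending.isEmpty()) {
          Future<String> done = completionService.take(); // blocks until one task finishes
          pending.remove(done);
          System.out.println(done.get());
        }
        executor.shutdown();
      }
    }
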
Modified: lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/MapReduceIndexerTool.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/MapReduceIndexerTool.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/MapReduceIndexerTool.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/MapReduceIndexerTool.java Sun Mar 16 19:39:10 2014
@@ -40,6 +40,7 @@ import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Random;
+import java.util.concurrent.TimeUnit;
 
 import net.sourceforge.argparse4j.ArgumentParsers;
 import net.sourceforge.argparse4j.impl.Arguments;
@@ -79,8 +80,8 @@ import org.apache.solr.hadoop.morphline.
 import org.apache.solr.hadoop.morphline.MorphlineMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-
 import org.kitesdk.morphline.base.Fields;
+
 import com.google.common.base.Charsets;
 import com.google.common.base.Preconditions;
 import com.google.common.io.ByteStreams;
@@ -540,7 +541,7 @@ public class MapReduceIndexerTool extend
 
   static List<List<String>> buildShardUrls(List<Object> urls, Integer numShards) {
     if (urls == null) return null;
-    List<List<String>> shardUrls = new ArrayList<List<String>>(urls.size());
+    List<List<String>> shardUrls = new ArrayList<>(urls.size());
     List<String> list = null;
     
     int sz;
@@ -550,7 +551,7 @@ public class MapReduceIndexerTool extend
     sz = (int) Math.ceil(urls.size() / (float)numShards);
     for (int i = 0; i < urls.size(); i++) {
       if (i % sz == 0) {
-        list = new ArrayList<String>();
+        list = new ArrayList<>();
         shardUrls.add(list);
       }
       list.add((String) urls.get(i));
@@ -613,7 +614,7 @@ public class MapReduceIndexerTool extend
         "which is required for passing files via --files and --libjars");
     }
 
-    long programStartTime = System.currentTimeMillis();
+    long programStartTime = System.nanoTime();
     if (options.fairSchedulerPool != null) {
       getConf().set("mapred.fairscheduler.pool", options.fairSchedulerPool);
     }
@@ -684,7 +685,7 @@ public class MapReduceIndexerTool extend
         
     
     LOG.info("Randomizing list of {} input files to spread indexing load more evenly among mappers", numFiles);
-    long startTime = System.currentTimeMillis();      
+    long startTime = System.nanoTime();      
     if (numFiles < job.getConfiguration().getInt(MAIN_MEMORY_RANDOMIZATION_THRESHOLD, 100001)) {
       // If there are few input files reduce latency by directly running main memory randomization 
       // instead of launching a high latency MapReduce job
@@ -698,7 +699,7 @@ public class MapReduceIndexerTool extend
         return -1; // job failed
       }
     }
-    float secs = (System.currentTimeMillis() - startTime) / 1000.0f;
+    float secs = (System.nanoTime() - startTime) / 1e9f;
     LOG.info("Done. Randomizing list of {} input files took {} secs", numFiles, secs);
     
     
@@ -765,9 +766,9 @@ public class MapReduceIndexerTool extend
     MorphlineMapRunner runner = setupMorphline(options);
     if (options.isDryRun && runner != null) {
       LOG.info("Indexing {} files in dryrun mode", numFiles);
-      startTime = System.currentTimeMillis();
+      startTime = System.nanoTime();
       dryRun(runner, fs, fullInputList);
-      secs = (System.currentTimeMillis() - startTime) / 1000.0f;
+      secs = (System.nanoTime() - startTime) / 1e9f;
       LOG.info("Done. Indexing {} files in dryrun mode took {} secs", numFiles, secs);
       goodbye(null, programStartTime);
       return 0;
@@ -778,12 +779,12 @@ public class MapReduceIndexerTool extend
     job.setOutputKeyClass(Text.class);
     job.setOutputValueClass(SolrInputDocumentWritable.class);
     LOG.info("Indexing {} files using {} real mappers into {} reducers", new Object[] {numFiles, realMappers, reducers});
-    startTime = System.currentTimeMillis();
+    startTime = System.nanoTime();
     if (!waitForCompletion(job, options.isVerbose)) {
       return -1; // job failed
     }
 
-    secs = (System.currentTimeMillis() - startTime) / 1000.0f;
+    secs = (System.nanoTime() - startTime) / 1e9f;
     LOG.info("Done. Indexing {} files using {} real mappers into {} reducers took {} secs", new Object[] {numFiles, realMappers, reducers, secs});
 
     int mtreeMergeIterations = 0;
@@ -816,14 +817,14 @@ public class MapReduceIndexerTool extend
       
       LOG.info("MTree merge iteration {}/{}: Merging {} shards into {} shards using fanout {}", new Object[] { 
           mtreeMergeIteration, mtreeMergeIterations, reducers, (reducers / options.fanout), options.fanout});
-      startTime = System.currentTimeMillis();
+      startTime = System.nanoTime();
       if (!waitForCompletion(job, options.isVerbose)) {
         return -1; // job failed
       }
       if (!renameTreeMergeShardDirs(outputTreeMergeStep, job, fs)) {
         return -1;
       }
-      secs = (System.currentTimeMillis() - startTime) / 1000.0f;
+      secs = (System.nanoTime() - startTime) / 1e9f;
       LOG.info("MTree merge iteration {}/{}: Done. Merging {} shards into {} shards using fanout {} took {} secs",
           new Object[] {mtreeMergeIteration, mtreeMergeIterations, reducers, (reducers / options.fanout), options.fanout, secs});
       
@@ -1343,7 +1344,7 @@ public class MapReduceIndexerTool extend
   }
 
   private void goodbye(Job job, long startTime) {
-    float secs = (System.currentTimeMillis() - startTime) / 1000.0f;
+    float secs = (System.nanoTime() - startTime) / 1e9f;
     if (job != null) {
       LOG.info("Succeeded with job: " + getJobInfo(job));
     }

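The buildShardUrls hunk above splits a flat list of replica URLs into numShards consecutive groups of size ceil(urls/numShards). The grouping logic in isolation:

    import java.util.ArrayList;
    import java.util.List;

    class ShardUrlGroupingSketch {
      // The last group may be smaller when the list does not divide evenly.
      static List<List<String>> group(List<String> urls, int numShards) {
        List<List<String>> shardUrls = new ArrayList<>(numShards);
        int sz = (int) Math.ceil(urls.size() / (float) numShards);
        List<String> current = null;
        for (int i = 0; i < urls.size(); i++) {
          if (i % sz == 0) {       // start a new group every sz urls
            current = new ArrayList<>();
            shardUrls.add(current);
          }
          current.add(urls.get(i));
        }
        return shardUrls;
      }
    }
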
Modified: lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/SolrOutputFormat.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/SolrOutputFormat.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/SolrOutputFormat.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/SolrOutputFormat.java Sun Mar 16 19:39:10 2014
@@ -158,7 +158,7 @@ public class SolrOutputFormat<K, V> exte
     Utils.getLogConfigFile(context.getConfiguration());
     Path workDir = getDefaultWorkFile(context, "");
     int batchSize = getBatchSize(context.getConfiguration());
-    return new SolrRecordWriter<K, V>(context, workDir, batchSize);
+    return new SolrRecordWriter<>(context, workDir, batchSize);
   }
 
   public static void setupSolrHomeCache(File solrHomeDir, Job job) throws IOException{
@@ -202,7 +202,7 @@ public class SolrOutputFormat<K, V> exte
   }
 
   private static void createZip(File dir, File out) throws IOException {
-    HashSet<File> files = new HashSet<File>();
+    HashSet<File> files = new HashSet<>();
     // take only conf/ and lib/
     for (String allowedDirectory : SolrRecordWriter
         .getAllowedConfigDirectories()) {

Modified: lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/SolrRecordWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/SolrRecordWriter.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/SolrRecordWriter.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/SolrRecordWriter.java Sun Mar 16 19:39:10 2014
@@ -27,6 +27,7 @@ import java.util.List;
 import java.util.Locale;
 import java.util.Properties;
 import java.util.Set;
+import java.util.concurrent.TimeUnit;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.filecache.DistributedCache;
@@ -52,10 +53,10 @@ class SolrRecordWriter<K, V> extends Rec
   
   private static final Logger LOG = LoggerFactory.getLogger(SolrRecordWriter.class);
 
-  public final static List<String> allowedConfigDirectories = new ArrayList<String>(
+  public final static List<String> allowedConfigDirectories = new ArrayList<>(
       Arrays.asList(new String[] { "conf", "lib", "solr.xml" }));
 
-  public final static Set<String> requiredConfigDirectories = new HashSet<String>();
+  public final static Set<String> requiredConfigDirectories = new HashSet<>();
   
   static {
     requiredConfigDirectories.add("conf");
@@ -98,9 +99,9 @@ class SolrRecordWriter<K, V> extends Rec
   private final List<SolrInputDocument> batch;
   private final int batchSize;
   private long numDocsWritten = 0;
-  private long nextLogTime = System.currentTimeMillis();
+  private long nextLogTime = System.nanoTime();
 
-  private static HashMap<TaskID, Reducer<?,?,?,?>.Context> contextMap = new HashMap<TaskID, Reducer<?,?,?,?>.Context>();
+  private static HashMap<TaskID, Reducer<?,?,?,?>.Context> contextMap = new HashMap<>();
   
   public SolrRecordWriter(TaskAttemptContext context, Path outputShardDir, int batchSize) {
     this.batchSize = batchSize;
@@ -266,9 +267,9 @@ class SolrRecordWriter<K, V> extends Rec
         if (batch.size() >= batchSize) {
           batchWriter.queueBatch(batch);
           numDocsWritten += batch.size();
-          if (System.currentTimeMillis() >= nextLogTime) {
+          if (System.nanoTime() >= nextLogTime) {
             LOG.info("docsWritten: {}", numDocsWritten);
-            nextLogTime += 10000;
+            nextLogTime += TimeUnit.NANOSECONDS.convert(10, TimeUnit.SECONDS);
           }
           batch.clear();
         }

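The nextLogTime hunks above keep the existing rate-limited progress logging (at most one line roughly every ten seconds) while expressing the deadline on the monotonic clock. The throttling pattern in isolation:

    import java.util.concurrent.TimeUnit;

    class ThrottledProgressSketch {
      private long nextLogTime = System.nanoTime();
      private long numDocsWritten = 0;

      void onDocWritten() {
        numDocsWritten++;
        if (System.nanoTime() >= nextLogTime) {
          System.out.println("docsWritten: " + numDocsWritten);
          // push the next log deadline 10 seconds (in nanoseconds) forward
          nextLogTime += TimeUnit.NANOSECONDS.convert(10, TimeUnit.SECONDS);
        }
      }
    }
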
Modified: lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/TreeMergeOutputFormat.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/TreeMergeOutputFormat.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/TreeMergeOutputFormat.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/TreeMergeOutputFormat.java Sun Mar 16 19:39:10 2014
@@ -22,6 +22,7 @@ import java.io.OutputStreamWriter;
 import java.io.Writer;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.concurrent.TimeUnit;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.NullWritable;
@@ -133,7 +134,7 @@ public class TreeMergeOutputFormat exten
 
         context.setStatus("Logically merging " + shards.size() + " shards into one shard");
         LOG.info("Logically merging " + shards.size() + " shards into one shard: " + workDir);
-        long start = System.currentTimeMillis();
+        long start = System.nanoTime();
         
         writer.addIndexes(indexes); 
         // TODO: avoid intermediate copying of files into dst directory; rename the files into the dir instead (cp -> rename) 
@@ -143,12 +144,12 @@ public class TreeMergeOutputFormat exten
         if (LOG.isDebugEnabled()) {
-          context.getCounter(SolrCounters.class.getName(), SolrCounters.LOGICAL_TREE_MERGE_TIME.toString()).increment(System.currentTimeMillis() - start);
+          context.getCounter(SolrCounters.class.getName(), SolrCounters.LOGICAL_TREE_MERGE_TIME.toString()).increment(TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start));
         }
-        float secs = (System.currentTimeMillis() - start) / 1000.0f;
+        float secs = (System.nanoTime() - start) / (float) TimeUnit.SECONDS.toNanos(1);
         LOG.info("Logical merge took {} secs", secs);        
         int maxSegments = context.getConfiguration().getInt(TreeMergeMapper.MAX_SEGMENTS_ON_TREE_MERGE, Integer.MAX_VALUE);
         context.setStatus("Optimizing Solr: forcing mtree merge down to " + maxSegments + " segments");
         LOG.info("Optimizing Solr: forcing tree merge down to {} segments", maxSegments);
-        start = System.currentTimeMillis();
+        start = System.nanoTime();
         if (maxSegments < Integer.MAX_VALUE) {
           writer.forceMerge(maxSegments); 
           // TODO: consider perf enhancement for no-deletes merges: bulk-copy the postings data 
@@ -157,13 +158,13 @@ public class TreeMergeOutputFormat exten
         if (LOG.isDebugEnabled()) {
-          context.getCounter(SolrCounters.class.getName(), SolrCounters.PHYSICAL_TREE_MERGE_TIME.toString()).increment(System.currentTimeMillis() - start);
+          context.getCounter(SolrCounters.class.getName(), SolrCounters.PHYSICAL_TREE_MERGE_TIME.toString()).increment(TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start));
         }
-        secs = (System.currentTimeMillis() - start) / 1000.0f;
+        secs = (System.nanoTime() - start) / (float) TimeUnit.SECONDS.toNanos(1);
         LOG.info("Optimizing Solr: done forcing tree merge down to {} segments in {} secs", maxSegments, secs);
         
-        start = System.currentTimeMillis();
+        start = System.nanoTime();
         LOG.info("Optimizing Solr: Closing index writer");
         writer.close();
-        secs = (System.currentTimeMillis() - start) / 1000.0f;
+        secs = (System.nanoTime() - start) / (float) TimeUnit.SECONDS.toNanos(1);
         LOG.info("Optimizing Solr: Done closing index writer in {} secs", secs);
         context.setStatus("Done");
       } finally {

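Note the unit conversion in the hunks above: in Java, '^' is bitwise XOR, so an expression like (10^9) evaluates to 3 rather than 1,000,000,000, silently inflating the reported seconds. The safe way to turn a nanoTime() delta into seconds is to divide by TimeUnit.SECONDS.toNanos(1) or the literal 1e9f. A minimal sketch; doWork() and LOG are hypothetical stand-ins:

    long start = System.nanoTime();
    doWork();
    // Divide by an explicit nanos-per-second constant; never write (10^9),
    // which XORs 10 with 9 and yields 3.
    float secs = (System.nanoTime() - start) / (float) TimeUnit.SECONDS.toNanos(1);
    LOG.info("took {} secs", secs);
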
Modified: lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/ZooKeeperInspector.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/ZooKeeperInspector.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/ZooKeeperInspector.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/ZooKeeperInspector.java Sun Mar 16 19:39:10 2014
@@ -55,14 +55,14 @@ final class ZooKeeperInspector {
 
     DocCollection docCollection = extractDocCollection(zkHost, collection);
     List<Slice> slices = getSortedSlices(docCollection.getSlices());
-    List<List<String>> solrUrls = new ArrayList<List<String>>(slices.size());
+    List<List<String>> solrUrls = new ArrayList<>(slices.size());
     for (Slice slice : slices) {
       if (slice.getLeader() == null) {
         throw new IllegalArgumentException("Cannot find SolrCloud slice leader. " +
             "It looks like not all of your shards are registered in ZooKeeper yet");
       }
       Collection<Replica> replicas = slice.getReplicas();
-      List<String> urls = new ArrayList<String>(replicas.size());
+      List<String> urls = new ArrayList<>(replicas.size());
       for (Replica replica : replicas) {
         ZkCoreNodeProps props = new ZkCoreNodeProps(replica);
         urls.add(props.getCoreUrl());

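Most of the remaining hunks in this commit are the same mechanical cleanup: replacing repeated constructor type arguments with the Java 7 diamond operator, which lets the compiler infer them from the target type. A minimal illustration:

    // Pre-Java 7: the type arguments are spelled out on both sides.
    List<List<String>> solrUrlsOld = new ArrayList<List<String>>();

    // Java 7 diamond: <List<String>> is inferred from the declaration,
    // including for sized constructors and copy constructors.
    List<List<String>> solrUrls = new ArrayList<>();
    List<List<String>> copy = new ArrayList<>(solrUrls);
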
Modified: lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/LineRandomizerMapperReducerTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/LineRandomizerMapperReducerTest.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/LineRandomizerMapperReducerTest.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/LineRandomizerMapperReducerTest.java Sun Mar 16 19:39:10 2014
@@ -49,12 +49,12 @@ public class LineRandomizerMapperReducer
   @Test
   public void testMapReduce2Items() throws IOException {
     mapReduceDriver.withAll(Arrays.asList(
-        new Pair<LongWritable, Text>(new LongWritable(0), new Text("hello")),
-        new Pair<LongWritable, Text>(new LongWritable(1), new Text("world"))
+        new Pair<>(new LongWritable(0), new Text("hello")),
+        new Pair<>(new LongWritable(1), new Text("world"))
         ));
     mapReduceDriver.withAllOutput(Arrays.asList(
-        new Pair<Text, NullWritable>(new Text("world"), NullWritable.get()),
-        new Pair<Text, NullWritable>(new Text("hello"), NullWritable.get())
+        new Pair<>(new Text("world"), NullWritable.get()),
+        new Pair<>(new Text("hello"), NullWritable.get())
         ));
     mapReduceDriver.runTest();
   }
@@ -62,14 +62,14 @@ public class LineRandomizerMapperReducer
   @Test
   public void testMapReduce3Items() throws IOException {
     mapReduceDriver.withAll(Arrays.asList(
-        new Pair<LongWritable, Text>(new LongWritable(0), new Text("hello")),
-        new Pair<LongWritable, Text>(new LongWritable(1), new Text("world")),
-        new Pair<LongWritable, Text>(new LongWritable(2), new Text("nadja"))
+        new Pair<>(new LongWritable(0), new Text("hello")),
+        new Pair<>(new LongWritable(1), new Text("world")),
+        new Pair<>(new LongWritable(2), new Text("nadja"))
         ));
     mapReduceDriver.withAllOutput(Arrays.asList(
-        new Pair<Text, NullWritable>(new Text("nadja"), NullWritable.get()),
-        new Pair<Text, NullWritable>(new Text("world"), NullWritable.get()),
-        new Pair<Text, NullWritable>(new Text("hello"), NullWritable.get())
+        new Pair<>(new Text("nadja"), NullWritable.get()),
+        new Pair<>(new Text("world"), NullWritable.get()),
+        new Pair<>(new Text("hello"), NullWritable.get())
         ));
     mapReduceDriver.runTest();
   }
@@ -77,16 +77,16 @@ public class LineRandomizerMapperReducer
   @Test
   public void testMapReduce4Items() throws IOException {
     mapReduceDriver.withAll(Arrays.asList(
-        new Pair<LongWritable, Text>(new LongWritable(0), new Text("hello")),
-        new Pair<LongWritable, Text>(new LongWritable(1), new Text("world")),
-        new Pair<LongWritable, Text>(new LongWritable(2), new Text("nadja")),
-        new Pair<LongWritable, Text>(new LongWritable(3), new Text("basti"))
+        new Pair<>(new LongWritable(0), new Text("hello")),
+        new Pair<>(new LongWritable(1), new Text("world")),
+        new Pair<>(new LongWritable(2), new Text("nadja")),
+        new Pair<>(new LongWritable(3), new Text("basti"))
         ));
     mapReduceDriver.withAllOutput(Arrays.asList(
-        new Pair<Text, NullWritable>(new Text("nadja"), NullWritable.get()),
-        new Pair<Text, NullWritable>(new Text("world"), NullWritable.get()),
-        new Pair<Text, NullWritable>(new Text("basti"), NullWritable.get()),
-        new Pair<Text, NullWritable>(new Text("hello"), NullWritable.get())
+        new Pair<>(new Text("nadja"), NullWritable.get()),
+        new Pair<>(new Text("world"), NullWritable.get()),
+        new Pair<>(new Text("basti"), NullWritable.get()),
+        new Pair<>(new Text("hello"), NullWritable.get())
         ));
     mapReduceDriver.runTest();
   }

Modified: lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineGoLiveMiniMRTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineGoLiveMiniMRTest.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineGoLiveMiniMRTest.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineGoLiveMiniMRTest.java Sun Mar 16 19:39:10 2014
@@ -245,7 +245,7 @@ public class MorphlineGoLiveMiniMRTest e
   public void testBuildShardUrls() throws Exception {
     // 2x3
     Integer numShards = 2;
-    List<Object> urls = new ArrayList<Object>();
+    List<Object> urls = new ArrayList<>();
     urls.add("shard1");
     urls.add("shard2");
     urls.add("shard3");
@@ -303,7 +303,7 @@ public class MorphlineGoLiveMiniMRTest e
     // null shards 3x1
     numShards = null;
     
-    urls = new ArrayList<Object>();
+    urls = new ArrayList<>();
     urls.add("shard1");
     urls.add("shard2");
     urls.add("shard3");
@@ -318,7 +318,7 @@ public class MorphlineGoLiveMiniMRTest e
     
     // 2x(2,3) off balance
     numShards = 2;
-    urls = new ArrayList<Object>();
+    urls = new ArrayList<>();
     urls.add("shard1");
     urls.add("shard2");
     urls.add("shard3");
@@ -328,7 +328,7 @@ public class MorphlineGoLiveMiniMRTest e
 
     assertEquals(shardUrls.toString(), 2, shardUrls.size());
     
-    Set<Integer> counts = new HashSet<Integer>();
+    Set<Integer> counts = new HashSet<>();
     counts.add(shardUrls.get(0).size());
     counts.add(shardUrls.get(1).size());
     
@@ -388,7 +388,7 @@ public class MorphlineGoLiveMiniMRTest e
         "--go-live"
     };
     args = prependInitialArgs(args);
-    List<String> argList = new ArrayList<String>();
+    List<String> argList = new ArrayList<>();
     getShardUrlArgs(argList);
     args = concat(args, argList.toArray(new String[0]));
     
@@ -418,7 +418,7 @@ public class MorphlineGoLiveMiniMRTest e
         "--go-live-threads", Integer.toString(random().nextInt(15) + 1)
     };
     args = prependInitialArgs(args);
-    argList = new ArrayList<String>();
+    argList = new ArrayList<>();
     getShardUrlArgs(argList);
     args = concat(args, argList.toArray(new String[0]));
     
@@ -608,7 +608,7 @@ public class MorphlineGoLiveMiniMRTest e
     };
     args = prependInitialArgs(args);
 
-    argList = new ArrayList<String>();
+    argList = new ArrayList<>();
     getShardUrlArgs(argList, replicatedCollection);
     args = concat(args, argList.toArray(new String[0]));
     

Modified: lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineReducerTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineReducerTest.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineReducerTest.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineReducerTest.java Sun Mar 16 19:39:10 2014
@@ -106,7 +106,7 @@ public class MorphlineReducerTest extend
       Configuration config = reduceDriver.getConfiguration();
       setupHadoopConfig(config);
       
-      List<SolrInputDocumentWritable> values = new ArrayList<SolrInputDocumentWritable>();
+      List<SolrInputDocumentWritable> values = new ArrayList<>();
       SolrInputDocument sid = new SolrInputDocument();
       String id = "myid1";
       sid.addField("id", id);

Modified: lucene/dev/branches/lucene5376_2/solr/contrib/morphlines-cell/src/java/org/apache/solr/morphlines/cell/SolrCellBuilder.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/morphlines-cell/src/java/org/apache/solr/morphlines/cell/SolrCellBuilder.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/morphlines-cell/src/java/org/apache/solr/morphlines/cell/SolrCellBuilder.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/morphlines-cell/src/java/org/apache/solr/morphlines/cell/SolrCellBuilder.java Sun Mar 16 19:39:10 2014
@@ -151,7 +151,7 @@ public final class SolrCellBuilder imple
         cellParams.put(ExtractingParams.XPATH_EXPRESSION, xpathExpr);
       }
       
-      this.dateFormats = getConfigs().getStringList(config, "dateFormats", new ArrayList<String>(DateUtil.DEFAULT_DATE_FORMATS));
+      this.dateFormats = getConfigs().getStringList(config, "dateFormats", new ArrayList<>(DateUtil.DEFAULT_DATE_FORMATS));
       
       String handlerStr = getConfigs().getString(config, "solrContentHandlerFactory", TrimSolrContentHandlerFactory.class.getName());
       Class<? extends SolrContentHandlerFactory> factoryClass;
@@ -165,7 +165,7 @@ public final class SolrCellBuilder imple
 
       this.locale = getLocale(getConfigs().getString(config, "locale", ""));
       
-      this.mediaTypeToParserMap = new HashMap<MediaType, Parser>();
+      this.mediaTypeToParserMap = new HashMap<>();
       //MimeTypes mimeTypes = MimeTypes.getDefaultMimeTypes(); // FIXME getMediaTypeRegistry.normalize() 
 
       List<? extends Config> parserConfigs = getConfigs().getConfigList(config, "parsers");

Modified: lucene/dev/branches/lucene5376_2/solr/contrib/morphlines-cell/src/test/org/apache/solr/morphlines/cell/SolrCellMorphlineTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/morphlines-cell/src/test/org/apache/solr/morphlines/cell/SolrCellMorphlineTest.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/morphlines-cell/src/test/org/apache/solr/morphlines/cell/SolrCellMorphlineTest.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/morphlines-cell/src/test/org/apache/solr/morphlines/cell/SolrCellMorphlineTest.java Sun Mar 16 19:39:10 2014
@@ -37,8 +37,8 @@ import org.junit.Test;
 
 public class SolrCellMorphlineTest extends AbstractSolrMorphlineTestBase {
 
-  private Map<String,Integer> expectedRecords = new HashMap<String,Integer>();
-  private Map<String, Map<String, Object>> expectedRecordContents = new HashMap<String, Map<String, Object>>();
+  private Map<String,Integer> expectedRecords = new HashMap<>();
+  private Map<String, Map<String, Object>> expectedRecordContents = new HashMap<>();
   @BeforeClass
   public static void beforeClass2() {
     assumeFalse("FIXME: Morphlines currently has issues with Windows paths", Constants.WINDOWS);

Modified: lucene/dev/branches/lucene5376_2/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/CollectingDocumentLoader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/CollectingDocumentLoader.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/CollectingDocumentLoader.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/CollectingDocumentLoader.java Sun Mar 16 19:39:10 2014
@@ -31,8 +31,8 @@ import org.slf4j.LoggerFactory;
 class CollectingDocumentLoader implements DocumentLoader {
 
   private final int batchSize;
-  private final List<SolrInputDocument> batch = new ArrayList<SolrInputDocument> ();
-  private List<SolrInputDocument> results = new ArrayList<SolrInputDocument> ();
+  private final List<SolrInputDocument> batch = new ArrayList<>();
+  private List<SolrInputDocument> results = new ArrayList<>();
 
   private static final Logger LOGGER = LoggerFactory.getLogger(CollectingDocumentLoader.class);
 

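CollectingDocumentLoader follows the same buffer-and-flush batching as SolrRecordWriter earlier in this commit: documents accumulate in a batch list and are handed off in groups of batchSize to amortize per-request overhead. A minimal sketch of the pattern, with hypothetical names, assuming java.util.ArrayList and Solr's SolrInputDocument are imported:

    private final int batchSize = 100;                      // illustrative size
    private final List<SolrInputDocument> batch = new ArrayList<>();
    private final List<SolrInputDocument> results = new ArrayList<>();

    public void load(SolrInputDocument doc) {
      batch.add(doc);
      if (batch.size() >= batchSize) {
        flush();
      }
    }

    private void flush() {
      // The real loaders send the batch to Solr; this sketch just collects it.
      results.addAll(batch);
      batch.clear();
    }
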
Modified: lucene/dev/branches/lucene5376_2/solr/contrib/uima/src/java/org/apache/solr/uima/processor/SolrUIMAConfigurationReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/uima/src/java/org/apache/solr/uima/processor/SolrUIMAConfigurationReader.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/uima/src/java/org/apache/solr/uima/processor/SolrUIMAConfigurationReader.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/uima/src/java/org/apache/solr/uima/processor/SolrUIMAConfigurationReader.java Sun Mar 16 19:39:10 2014
@@ -65,7 +65,7 @@ public class SolrUIMAConfigurationReader
 
   @SuppressWarnings("rawtypes")
   private Map<String, Map<String, MapField>> readTypesFeaturesFieldsMapping() {
-    Map<String, Map<String, MapField>> map = new HashMap<String, Map<String, MapField>>();
+    Map<String, Map<String, MapField>> map = new HashMap<>();
 
     NamedList fieldMappings = (NamedList) args.get("fieldMappings");
     /* iterate over UIMA types */
@@ -73,7 +73,7 @@ public class SolrUIMAConfigurationReader
       NamedList type = (NamedList) fieldMappings.get("type", i);
       String typeName = (String)type.get("name");
 
-      Map<String, MapField> subMap = new HashMap<String, MapField>();
+      Map<String, MapField> subMap = new HashMap<>();
       /* iterate over mapping definitions */
       for(int j = 0; j < type.size() - 1; j++){
         NamedList mapping = (NamedList) type.get("mapping", j + 1);
@@ -96,7 +96,7 @@ public class SolrUIMAConfigurationReader
 
   @SuppressWarnings("rawtypes")
   private Map<String, Object> readAEOverridingParameters() {
-    Map<String, Object> runtimeParameters = new HashMap<String, Object>();
+    Map<String, Object> runtimeParameters = new HashMap<>();
     NamedList runtimeParams = (NamedList) args.get("runtimeParameters");
     for (int i = 0; i < runtimeParams.size(); i++) {
       String name = runtimeParams.getName(i);

Modified: lucene/dev/branches/lucene5376_2/solr/contrib/uima/src/test/org/apache/solr/uima/processor/UIMAUpdateRequestProcessorTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/uima/src/test/org/apache/solr/uima/processor/UIMAUpdateRequestProcessorTest.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/uima/src/test/org/apache/solr/uima/processor/UIMAUpdateRequestProcessorTest.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/uima/src/test/org/apache/solr/uima/processor/UIMAUpdateRequestProcessorTest.java Sun Mar 16 19:39:10 2014
@@ -192,7 +192,7 @@ public class UIMAUpdateRequestProcessorT
   }
 
   private void addDoc(String chain, String doc) throws Exception {
-    Map<String, String[]> params = new HashMap<String, String[]>();
+    Map<String, String[]> params = new HashMap<>();
     params.put(UpdateParams.UPDATE_CHAIN, new String[] { chain });
     MultiMapSolrParams mmparams = new MultiMapSolrParams(params);
     SolrQueryRequestBase req = new SolrQueryRequestBase(h.getCore(), (SolrParams) mmparams) {
@@ -200,7 +200,7 @@ public class UIMAUpdateRequestProcessorT
 
     UpdateRequestHandler handler = new UpdateRequestHandler();
     handler.init(null);
-    ArrayList<ContentStream> streams = new ArrayList<ContentStream>(2);
+    ArrayList<ContentStream> streams = new ArrayList<>(2);
     streams.add(new ContentStreamBase.StringStream(doc));
     req.setContentStreams(streams);
     handler.handleRequestBody(req, new SolrQueryResponse());

Modified: lucene/dev/branches/lucene5376_2/solr/contrib/velocity/src/java/org/apache/solr/response/SolrParamResourceLoader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/contrib/velocity/src/java/org/apache/solr/response/SolrParamResourceLoader.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/contrib/velocity/src/java/org/apache/solr/response/SolrParamResourceLoader.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/contrib/velocity/src/java/org/apache/solr/response/SolrParamResourceLoader.java Sun Mar 16 19:39:10 2014
@@ -31,7 +31,7 @@ import java.util.Iterator;
 import java.util.Map;
 
 public class SolrParamResourceLoader extends ResourceLoader {
-  private Map<String,String> templates = new HashMap<String,String>();
+  private Map<String,String> templates = new HashMap<>();
   public SolrParamResourceLoader(SolrQueryRequest request) {
     super();
 

Modified: lucene/dev/branches/lucene5376_2/solr/core/src/java/org/apache/solr/SolrLogFormatter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/core/src/java/org/apache/solr/SolrLogFormatter.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/core/src/java/org/apache/solr/SolrLogFormatter.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/core/src/java/org/apache/solr/SolrLogFormatter.java Sun Mar 16 19:39:10 2014
@@ -47,7 +47,7 @@ public class SolrLogFormatter extends Fo
 
   long startTime = System.currentTimeMillis();
   long lastTime = startTime;
-  Map<Method, String> methodAlias = new HashMap<Method, String>();
+  Map<Method, String> methodAlias = new HashMap<>();
   
   public static class Method {
     public String className;
@@ -106,9 +106,9 @@ public class SolrLogFormatter extends Fo
     Map<String, Object> coreProps;
   }
 
-  Map<SolrCore, CoreInfo> coreInfoMap = new WeakHashMap<SolrCore, CoreInfo>();    // TODO: use something that survives across a core reload?
+  Map<SolrCore, CoreInfo> coreInfoMap = new WeakHashMap<>();    // TODO: use something that survives across a core reload?
 
-  public Map<String,String> classAliases = new HashMap<String, String>();
+  public Map<String,String> classAliases = new HashMap<>();
 
   @Override
   public String format(LogRecord record) {
@@ -379,7 +379,7 @@ sb.append("(group_name=").append(tg.getN
 
 
 
-  static ThreadLocal<String> threadLocal = new ThreadLocal<String>();
+  static ThreadLocal<String> threadLocal = new ThreadLocal<>();
   
   public static void main(String[] args) throws Exception {
 

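In SolrLogFormatter, coreInfoMap keys per-core state by the SolrCore itself in a WeakHashMap, so an entry is dropped automatically once its core is closed and garbage collected; the TODO in the hunk exists because a core reload produces a new SolrCore key, orphaning the old state. A minimal sketch of the weak-keyed cache pattern, with hypothetical Key/Info types and assuming java.util.WeakHashMap:

    Map<Key, Info> cache = new WeakHashMap<>();

    Info infoFor(Key key) {
      Info info = cache.get(key);
      if (info == null) {
        info = new Info();
        cache.put(key, info);  // entry vanishes when 'key' becomes unreachable
      }
      return info;
    }
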
Modified: lucene/dev/branches/lucene5376_2/solr/core/src/java/org/apache/solr/analysis/LegacyHTMLStripCharFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/core/src/java/org/apache/solr/analysis/LegacyHTMLStripCharFilter.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/core/src/java/org/apache/solr/analysis/LegacyHTMLStripCharFilter.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/core/src/java/org/apache/solr/analysis/LegacyHTMLStripCharFilter.java Sun Mar 16 19:39:10 2014
@@ -775,7 +775,7 @@ public class LegacyHTMLStripCharFilter e
 
   private static final HashMap<String,Character> entityTable;
   static {
-    entityTable = new HashMap<String,Character>();
+    entityTable = new HashMap<>();
     // entityName and entityVal generated from the python script
     // included in comments at the end of this file.
     final String[] entityName={ "zwnj","aring","gt","yen","ograve","Chi","delta","rang","sup","trade","Ntilde","xi","upsih","nbsp","Atilde","radic","otimes","aelig","oelig","equiv","ni","infin","Psi","auml","cup","Epsilon","otilde","lt","Icirc","Eacute","Lambda","sbquo","Prime","prime","psi","Kappa","rsaquo","Tau","uacute","ocirc","lrm","zwj","cedil","Alpha","not","amp","AElig","oslash","acute","lceil","alefsym","laquo","shy","loz","ge","Igrave","nu","Ograve","lsaquo","sube","euro","rarr","sdot","rdquo","Yacute","lfloor","lArr","Auml","Dagger","brvbar","Otilde","szlig","clubs","diams","agrave","Ocirc","Iota","Theta","Pi","zeta","Scaron","frac14","egrave","sub","iexcl","frac12","ordf","sum","prop","Uuml","ntilde","atilde","asymp","uml","prod","nsub","reg","rArr","Oslash","emsp","THORN","yuml","aacute","Mu","hArr","le","thinsp","dArr","ecirc","bdquo","Sigma","Aring","tilde","nabla","mdash","uarr","times","Ugrave","Eta","Agrave","chi","real","circ","eth","rceil","iuml","gamma","lambda"
 ,"harr","Egrave","frac34","dagger","divide","Ouml","image","ndash","hellip","igrave","Yuml","ang","alpha","frasl","ETH","lowast","Nu","plusmn","bull","sup1","sup2","sup3","Aacute","cent","oline","Beta","perp","Delta","there4","pi","iota","empty","euml","notin","iacute","para","epsilon","weierp","OElig","uuml","larr","icirc","Upsilon","omicron","upsilon","copy","Iuml","Oacute","Xi","kappa","ccedil","Ucirc","cap","mu","scaron","lsquo","isin","Zeta","minus","deg","and","tau","pound","curren","int","ucirc","rfloor","ensp","crarr","ugrave","exist","cong","theta","oplus","permil","Acirc","piv","Euml","Phi","Iacute","quot","Uacute","Omicron","ne","iquest","eta","rsquo","yacute","Rho","darr","Ecirc","Omega","acirc","sim","phi","sigmaf","macr","thetasym","Ccedil","ordm","uArr","forall","beta","fnof","rho","micro","eacute","omega","middot","Gamma","rlm","lang","spades","supe","thorn","ouml","or","raquo","part","sect","ldquo","hearts","sigma","oacute"};

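The entityTable above is populated once, at class-load time, from parallel name/value arrays in a static initializer. A minimal sketch of that pattern using an illustrative four-entity subset (the real table covers the full HTML entity set):

    private static final HashMap<String, Character> entityTable = new HashMap<>();
    static {
      final String[] entityName = { "amp", "lt", "gt", "quot" };
      final char[] entityVal = { '&', '<', '>', '"' };
      for (int i = 0; i < entityName.length; i++) {
        entityTable.put(entityName[i], entityVal[i]);  // char auto-boxes to Character
      }
    }
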
Modified: lucene/dev/branches/lucene5376_2/solr/core/src/java/org/apache/solr/analytics/accumulator/BasicAccumulator.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/core/src/java/org/apache/solr/analytics/accumulator/BasicAccumulator.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/core/src/java/org/apache/solr/analytics/accumulator/BasicAccumulator.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/core/src/java/org/apache/solr/analytics/accumulator/BasicAccumulator.java Sun Mar 16 19:39:10 2014
@@ -103,7 +103,7 @@ public class BasicAccumulator extends Va
   }
   
   public NamedList<?> export(){
-    NamedList<Object> base = new NamedList<Object>();
+    NamedList<Object> base = new NamedList<>();
     for (int count = 0; count < expressions.length; count++) {
       if (!hiddenExpressions.contains(expressionNames[count])) {
         base.add(expressionNames[count], expressions[count].getValue());

Modified: lucene/dev/branches/lucene5376_2/solr/core/src/java/org/apache/solr/analytics/accumulator/FacetingAccumulator.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376_2/solr/core/src/java/org/apache/solr/analytics/accumulator/FacetingAccumulator.java?rev=1578144&r1=1578143&r2=1578144&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376_2/solr/core/src/java/org/apache/solr/analytics/accumulator/FacetingAccumulator.java (original)
+++ lucene/dev/branches/lucene5376_2/solr/core/src/java/org/apache/solr/analytics/accumulator/FacetingAccumulator.java Sun Mar 16 19:39:10 2014
@@ -98,14 +98,14 @@ public class FacetingAccumulator extends
     List<RangeFacetRequest> rangeFreqs = request.getRangeFacets();
     List<QueryFacetRequest> queryFreqs = request.getQueryFacets();
 
-    this.fieldFacetExpressions = new LinkedHashMap<String,Map<String,Expression[]>>(fieldFreqs.size());
-    this.rangeFacetExpressions = new LinkedHashMap<String,Map<String,Expression[]>>(rangeFreqs.size());
-    this.queryFacetExpressions = new LinkedHashMap<String,Map<String,Expression[]>>(queryFreqs.size());
-    this.fieldFacetCollectors = new LinkedHashMap<String,Map<String,StatsCollector[]>>(fieldFreqs.size());
-    this.rangeFacetCollectors = new LinkedHashMap<String,Map<String,StatsCollector[]>>(rangeFreqs.size());
-    this.queryFacetCollectors = new LinkedHashMap<String,Map<String,StatsCollector[]>>(queryFreqs.size());
-    this.facetAccumulators = new ArrayList<FieldFacetAccumulator>();
-    this.hiddenFieldFacets = new HashSet<String>();
+    this.fieldFacetExpressions = new LinkedHashMap<>(fieldFreqs.size());
+    this.rangeFacetExpressions = new LinkedHashMap<>(rangeFreqs.size());
+    this.queryFacetExpressions = new LinkedHashMap<>(queryFreqs.size());
+    this.fieldFacetCollectors = new LinkedHashMap<>(fieldFreqs.size());
+    this.rangeFacetCollectors = new LinkedHashMap<>(rangeFreqs.size());
+    this.queryFacetCollectors = new LinkedHashMap<>(queryFreqs.size());
+    this.facetAccumulators = new ArrayList<>();
+    this.hiddenFieldFacets = new HashSet<>();
     
     /**
      * For each field facet request add a bucket to the {@link Expression} map and {@link StatsCollector} map.
@@ -130,13 +130,13 @@ public class FacetingAccumulator extends
      * are not created initially.
      */
     for( RangeFacetRequest freq : rangeFreqs ){
-      if( rangeFacets == null ) rangeFacets = new ArrayList<RangeFacetRequest>();
+      if( rangeFacets == null ) rangeFacets = new ArrayList<>();
       rangeFacets.add(freq);
       rangeFacetExpressions.put(freq.getName(), new LinkedHashMap<String,Expression[]>() );
       rangeFacetCollectors.put(freq.getName(), new LinkedHashMap<String,StatsCollector[]>());
     }
     for( QueryFacetRequest freq : queryFreqs ){
-      if( queryFacets == null ) queryFacets = new ArrayList<QueryFacetRequest>();
+      if( queryFacets == null ) queryFacets = new ArrayList<>();
       queryFacets.add(freq);
       queryFacetExpressions.put(freq.getName(), new LinkedHashMap<String,Expression[]>() );
       queryFacetCollectors.put(freq.getName(), new LinkedHashMap<String,StatsCollector[]>());
@@ -442,7 +442,7 @@ public class FacetingAccumulator extends
   @SuppressWarnings("unchecked")
   public NamedList<?> export() {
     final NamedList<Object> base = (NamedList<Object>)super.export();
-    NamedList<NamedList<?>> facetList = new NamedList<NamedList<?>>();
+    NamedList<NamedList<?>> facetList = new NamedList<>();
     
     // Add the field facet buckets to the output
     base.add("fieldFacets",facetList);
@@ -452,7 +452,7 @@ public class FacetingAccumulator extends
         continue;
       }
       final Map<String,Expression[]> buckets = fieldFacetExpressions.get(name);
-      final NamedList<Object> bucketBase = new NamedList<Object>();
+      final NamedList<Object> bucketBase = new NamedList<>();
 
       Iterable<Entry<String,Expression[]>> iter = buckets.entrySet();
       
@@ -471,7 +471,7 @@ public class FacetingAccumulator extends
         final Expression first = buckets.values().iterator().next()[sortPlace];
         final Comparator<Expression> comp = (Comparator<Expression>) first.comparator(sort.getDirection());
         
-        final List<Entry<String,Expression[]>> sorted = new ArrayList<Entry<String,Expression[]>>(buckets.size());
+        final List<Entry<String,Expression[]>> sorted = new ArrayList<>(buckets.size());
         Iterables.addAll(sorted, iter);
         Collections.sort(sorted, new EntryComparator(comp,sortPlace));
         iter = sorted;
@@ -493,12 +493,12 @@ public class FacetingAccumulator extends
     }
 
     // Add the range facet buckets to the output
-    facetList = new NamedList<NamedList<?>>();
+    facetList = new NamedList<>();
     base.add("rangeFacets",facetList);
     for( RangeFacetRequest freq : request.getRangeFacets() ){
       final String name = freq.getName();
       final Map<String,Expression[]> buckets = rangeFacetExpressions.get(name);
-      final NamedList<Object> bucketBase = new NamedList<Object>();
+      final NamedList<Object> bucketBase = new NamedList<>();
 
       Iterable<Entry<String,Expression[]>> iter = buckets.entrySet();
       
@@ -510,12 +510,12 @@ public class FacetingAccumulator extends
     }
     
     // Add the query facet buckets to the output
-    facetList = new NamedList<NamedList<?>>();
+    facetList = new NamedList<>();
     base.add("queryFacets",facetList);
     for( QueryFacetRequest freq : request.getQueryFacets() ){
       final String name = freq.getName();
       final Map<String,Expression[]> buckets = queryFacetExpressions.get(name);
-      final NamedList<Object> bucketBase = new NamedList<Object>();
+      final NamedList<Object> bucketBase = new NamedList<>();
 
       Iterable<Entry<String,Expression[]>> iter = buckets.entrySet();
       
@@ -535,7 +535,7 @@ public class FacetingAccumulator extends
    * @return named list of expressions
    */
   public NamedList<?> export(Expression[] expressionArr) {
-    NamedList<Object> base = new NamedList<Object>();
+    NamedList<Object> base = new NamedList<>();
     for (int count = 0; count < expressionArr.length; count++) {
       if (!hiddenExpressions.contains(expressionNames[count])) {
         base.add(expressionNames[count], expressionArr[count].getValue());