Posted to commits@lucene.apache.org by rm...@apache.org on 2014/03/12 15:39:39 UTC

svn commit: r1576755 [16/27] - in /lucene/dev/trunk: lucene/ lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/ lucene/analysis/common/src/java/org/apache/lucene/analysis/compound/ lucene/analysis/common/src/java/org/apache/lucene/a...
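
[Editor's note] The hunks below are a mechanical cleanup: explicit generic type arguments on the right-hand side of "new" expressions are replaced with the Java 7 diamond operator ("<>"), letting the compiler infer the type arguments from the declared type. A minimal, self-contained sketch of the before/after pattern follows; the class and variable names are illustrative only and are not taken from the patch.

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class DiamondExample {
      public static void main(String[] args) {
        // Pre-Java 7 style, as on the removed ("-") lines: the type
        // arguments are spelled out again on the right-hand side.
        List<Map<String, String>> fieldsOld = new ArrayList<Map<String, String>>();

        // Java 7 diamond operator, as on the added ("+") lines: the compiler
        // infers <Map<String, String>> from the variable's declared type.
        List<Map<String, String>> fields = new ArrayList<>();
        fields.add(new HashMap<>());               // inferred as HashMap<String, String>

        Map<String, Object> row = new HashMap<>(); // inferred as HashMap<String, Object>
        row.put("a", "a,bb,cc,d");

        System.out.println(fieldsOld.size() + " / " + fields.size() + " field spec(s), "
            + row.size() + " source column(s)");
      }
    }

Both forms compile to identical bytecode; the change only removes redundant type repetition.
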

Modified: lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestRegexTransformer.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestRegexTransformer.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestRegexTransformer.java (original)
+++ lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestRegexTransformer.java Wed Mar 12 14:39:17 2014
@@ -38,12 +38,12 @@ public class TestRegexTransformer extend
 
   @Test
   public void testCommaSeparated() {
-    List<Map<String, String>> fields = new ArrayList<Map<String, String>>();
+    List<Map<String, String>> fields = new ArrayList<>();
     // <field column="col1" sourceColName="a" splitBy="," />
     fields.add(getField("col1", "string", null, "a", ","));
     Context context = getContext(null, null, null, Context.FULL_DUMP, fields, null);
 
-    Map<String, Object> src = new HashMap<String, Object>();
+    Map<String, Object> src = new HashMap<>();
     src.put("a", "a,bb,cc,d");
 
     Map<String, Object> result = new RegexTransformer().transformRow(src, context);
@@ -54,21 +54,21 @@ public class TestRegexTransformer extend
 
   @Test
   public void testGroupNames() {
-    List<Map<String, String>> fields = new ArrayList<Map<String, String>>();
+    List<Map<String, String>> fields = new ArrayList<>();
     // <field column="col1" regex="(\w*)(\w*) (\w*)" groupNames=",firstName,lastName"/>
-    Map<String ,String > m = new HashMap<String, String>();
+    Map<String ,String > m = new HashMap<>();
     m.put(COLUMN,"fullName");
     m.put(GROUP_NAMES,",firstName,lastName");
     m.put(REGEX,"(\\w*) (\\w*) (\\w*)");
     fields.add(m);
     Context context = getContext(null, null, null, Context.FULL_DUMP, fields, null);
-    Map<String, Object> src = new HashMap<String, Object>();
+    Map<String, Object> src = new HashMap<>();
     src.put("fullName", "Mr Noble Paul");
 
     Map<String, Object> result = new RegexTransformer().transformRow(src, context);
     assertEquals("Noble", result.get("firstName"));
     assertEquals("Paul", result.get("lastName"));
-    src= new HashMap<String, Object>();
+    src= new HashMap<>();
     List<String> l= new ArrayList();
     l.add("Mr Noble Paul") ;
     l.add("Mr Shalin Mangar") ;
@@ -84,14 +84,14 @@ public class TestRegexTransformer extend
 
   @Test
   public void testReplaceWith() {
-    List<Map<String, String>> fields = new ArrayList<Map<String, String>>();
+    List<Map<String, String>> fields = new ArrayList<>();
     // <field column="name" regexp="'" replaceWith="''" />
     Map<String, String> fld = getField("name", "string", "'", null, null);
     fld.put(REPLACE_WITH, "''");
     fields.add(fld);
     Context context = getContext(null, null, null, Context.FULL_DUMP, fields, null);
 
-    Map<String, Object> src = new HashMap<String, Object>();
+    Map<String, Object> src = new HashMap<>();
     String s = "D'souza";
     src.put("name", s);
 
@@ -130,7 +130,7 @@ public class TestRegexTransformer extend
     fld.put(GROUP_NAMES,"t4,t5");
     fields.add(fld);
 
-    Map<String, Object> row = new HashMap<String, Object>();
+    Map<String, Object> row = new HashMap<>();
     String s = "Fuel Economy Range: 26 mpg Hwy, 19 mpg City";
     row.put("rowdata", s);
 
@@ -150,14 +150,14 @@ public class TestRegexTransformer extend
 
   @Test
   public void testMultiValuedRegex(){
-      List<Map<String, String>> fields = new ArrayList<Map<String, String>>();
+      List<Map<String, String>> fields = new ArrayList<>();
 //    <field column="participant" sourceColName="person" regex="(.*)" />
     Map<String, String> fld = getField("participant", null, "(.*)", "person", null);
     fields.add(fld);
     Context context = getContext(null, null,
             null, Context.FULL_DUMP, fields, null);
 
-    ArrayList<String> strings = new ArrayList<String>();
+    ArrayList<String> strings = new ArrayList<>();
     strings.add("hello");
     strings.add("world");
     Map<String, Object> result = new RegexTransformer().transformRow(createMap("person", strings), context);
@@ -165,7 +165,7 @@ public class TestRegexTransformer extend
   }
 
   public static List<Map<String, String>> getFields() {
-    List<Map<String, String>> fields = new ArrayList<Map<String, String>>();
+    List<Map<String, String>> fields = new ArrayList<>();
 
     // <field column="city_mileage" sourceColName="rowdata" regexp=
     //    "Fuel Economy Range:\\s*?\\d*?\\s*?mpg Hwy,\\s*?(\\d*?)\\s*?mpg City"

Modified: lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestScriptTransformer.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestScriptTransformer.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestScriptTransformer.java (original)
+++ lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestScriptTransformer.java Wed Mar 12 14:39:17 2014
@@ -48,7 +48,7 @@ public class TestScriptTransformer exten
       String script = "function f1(row,context){"
               + "row.put('name','Hello ' + row.get('name'));" + "return row;\n" + "}";
       Context context = getContext("f1", script);
-      Map<String, Object> map = new HashMap<String, Object>();
+      Map<String, Object> map = new HashMap<>();
       map.put("name", "Scott");
       EntityProcessorWrapper sep = new EntityProcessorWrapper(new SqlEntityProcessor(), null, null);
       sep.init(context);
@@ -62,8 +62,8 @@ public class TestScriptTransformer exten
   }
 
   private Context getContext(String funcName, String script) {
-    List<Map<String, String>> fields = new ArrayList<Map<String, String>>();
-    Map<String, String> entity = new HashMap<String, String>();
+    List<Map<String, String>> fields = new ArrayList<>();
+    Map<String, String> entity = new HashMap<>();
     entity.put("name", "hello");
     entity.put("transformer", "script:" + funcName);
 
@@ -81,7 +81,7 @@ public class TestScriptTransformer exten
               + "row.put('name','Hello ' + row.get('name'));" + "return row;\n" + "}";
 
       Context context = getContext("f1", script);
-      Map<String, Object> map = new HashMap<String, Object>();
+      Map<String, Object> map = new HashMap<>();
       map.put("name", "Scott");
       EntityProcessorWrapper sep = new EntityProcessorWrapper(new SqlEntityProcessor(), null, null);
       sep.init(context);

Modified: lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSimplePropertiesWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSimplePropertiesWriter.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSimplePropertiesWriter.java (original)
+++ lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSimplePropertiesWriter.java Wed Mar 12 14:39:17 2014
@@ -84,13 +84,13 @@ public class TestSimplePropertiesWriter 
       SimpleDateFormat df = new SimpleDateFormat(dateFormat, Locale.ROOT);
       Date oneSecondAgo = new Date(System.currentTimeMillis() - 1000);
       
-      Map<String,String> init = new HashMap<String,String>();
+      Map<String,String> init = new HashMap<>();
       init.put("dateFormat", dateFormat);
       init.put("filename", fileName);
       init.put("directory", fileLocation);
       SimplePropertiesWriter spw = new SimplePropertiesWriter();
       spw.init(new DataImporter(), init);
-      Map<String, Object> props = new HashMap<String,Object>();
+      Map<String, Object> props = new HashMap<>();
       props.put("SomeDates.last_index_time", oneSecondAgo);
       props.put("last_index_time", oneSecondAgo);
       spw.persist(props);

Modified: lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java (original)
+++ lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java Wed Mar 12 14:39:17 2014
@@ -53,17 +53,17 @@ public class TestSolrEntityProcessorEndT
 
   private static final String DEAD_SOLR_SERVER = "http://[ff01::114]:33332/solr";
   
-  private static final List<Map<String,Object>> DB_DOCS = new ArrayList<Map<String,Object>>();
-  private static final List<Map<String,Object>> SOLR_DOCS = new ArrayList<Map<String,Object>>();
+  private static final List<Map<String,Object>> DB_DOCS = new ArrayList<>();
+  private static final List<Map<String,Object>> SOLR_DOCS = new ArrayList<>();
   
   static {
     // dynamic fields in the destination schema
-    Map<String,Object> dbDoc = new HashMap<String,Object>();
+    Map<String,Object> dbDoc = new HashMap<>();
     dbDoc.put("dbid_s", "1");
     dbDoc.put("dbdesc_s", "DbDescription");
     DB_DOCS.add(dbDoc);
 
-    Map<String,Object> solrDoc = new HashMap<String,Object>();
+    Map<String,Object> solrDoc = new HashMap<>();
     solrDoc.put("id", "1");
     solrDoc.put("desc", "SolrDescription");
     SOLR_DOCS.add(solrDoc);
@@ -161,7 +161,7 @@ public class TestSolrEntityProcessorEndT
     
     try {
       addDocumentsToSolr(generateSolrDocuments(30));
-      Map<String,String> map = new HashMap<String,String>();
+      Map<String,String> map = new HashMap<>();
       map.put("rows", "50");
       runFullImport(generateDIHConfig("query='*:*' fq='desc:Description1*,desc:Description*2' rows='2'", false), map);
     } catch (Exception e) {
@@ -202,15 +202,15 @@ public class TestSolrEntityProcessorEndT
     assertQ(req("*:*"), "//result[@numFound='0']");
     
     try {
-      List<Map<String,Object>> DOCS = new ArrayList<Map<String,Object>>(DB_DOCS);
-      Map<String, Object> doc = new HashMap<String, Object>();
+      List<Map<String,Object>> DOCS = new ArrayList<>(DB_DOCS);
+      Map<String, Object> doc = new HashMap<>();
       doc.put("dbid_s", "2");
       doc.put("dbdesc_s", "DbDescription2");
       DOCS.add(doc);
       MockDataSource.setIterator("select * from x", DOCS.iterator());
 
-      DOCS = new ArrayList<Map<String,Object>>(SOLR_DOCS);
-      Map<String,Object> solrDoc = new HashMap<String,Object>();
+      DOCS = new ArrayList<>(SOLR_DOCS);
+      Map<String,Object> solrDoc = new HashMap<>();
       solrDoc.put("id", "2");
       solrDoc.put("desc", "SolrDescription2");
       DOCS.add(solrDoc);
@@ -261,9 +261,9 @@ public class TestSolrEntityProcessorEndT
   }
     
   private static List<Map<String,Object>> generateSolrDocuments(int num) {
-    List<Map<String,Object>> docList = new ArrayList<Map<String,Object>>();
+    List<Map<String,Object>> docList = new ArrayList<>();
     for (int i = 1; i <= num; i++) {
-      Map<String,Object> map = new HashMap<String,Object>();
+      Map<String,Object> map = new HashMap<>();
       map.put("id", i);
       map.put("desc", "Description" + i);
       docList.add(map);
@@ -272,7 +272,7 @@ public class TestSolrEntityProcessorEndT
   }
   
   private void addDocumentsToSolr(List<Map<String,Object>> docs) throws SolrServerException, IOException {
-    List<SolrInputDocument> sidl = new ArrayList<SolrInputDocument>();
+    List<SolrInputDocument> sidl = new ArrayList<>();
     for (Map<String,Object> doc : docs) {
       SolrInputDocument sd = new SolrInputDocument();
       for (Entry<String,Object> entry : doc.entrySet()) {

Modified: lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorUnit.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorUnit.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorUnit.java (original)
+++ lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorUnit.java Wed Mar 12 14:39:17 2014
@@ -46,8 +46,8 @@ public class TestSolrEntityProcessorUnit
   }
 
   public void testMultiValuedFields() {
-    List<Doc> docs = new ArrayList<Doc>();
-    List<FldType> types = new ArrayList<FldType>();
+    List<Doc> docs = new ArrayList<>();
+    List<FldType> types = new ArrayList<>();
     types.add(new FldType(ID, ONE_ONE, new SVal('A', 'Z', 4, 4)));
     types.add(new FldType("description", new IRange(3, 3), new SVal('a', 'c', 1, 1)));
     Doc testDoc = createDoc(types);
@@ -66,12 +66,12 @@ public class TestSolrEntityProcessorUnit
   }
 
   private List<Doc> generateUniqueDocs(int numDocs) {
-    List<FldType> types = new ArrayList<FldType>();
+    List<FldType> types = new ArrayList<>();
     types.add(new FldType(ID, ONE_ONE, new SVal('A', 'Z', 4, 40)));
     types.add(new FldType("description", new IRange(1, 3), new SVal('a', 'c', 1, 1)));
 
-    Set<Comparable> previousIds = new HashSet<Comparable>();
-    List<Doc> docs = new ArrayList<Doc>(numDocs);
+    Set<Comparable> previousIds = new HashSet<>();
+    List<Doc> docs = new ArrayList<>(numDocs);
     for (int i = 0; i < numDocs; i++) {
       Doc doc = createDoc(types);
       while (previousIds.contains(doc.id)) {

Modified: lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java (original)
+++ lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java Wed Mar 12 14:39:17 2014
@@ -79,11 +79,11 @@ public class TestSortedMapBackedCache ex
     DIHCache cache = null;
     try {
       cache = new SortedMapBackedCache();
-      Map<String, String> cacheProps = new HashMap<String, String>();
+      Map<String, String> cacheProps = new HashMap<>();
       cacheProps.put(DIHCacheSupport.CACHE_PRIMARY_KEY, "a_id");
       cache.open(getContext(cacheProps));
       
-      Map<String,Object> data = new HashMap<String,Object>();
+      Map<String,Object> data = new HashMap<>();
       data.put("a_id", null);
       data.put("bogus", "data");
       cache.add(data);
@@ -108,7 +108,7 @@ public class TestSortedMapBackedCache ex
   public void testCacheReopensWithUpdate() {
     DIHCache cache = null;
     try {      
-      Map<String, String> cacheProps = new HashMap<String, String>();
+      Map<String, String> cacheProps = new HashMap<>();
       cacheProps.put(DIHCacheSupport.CACHE_PRIMARY_KEY, "a_id");
       
       cache = new SortedMapBackedCache();
@@ -120,7 +120,7 @@ public class TestSortedMapBackedCache ex
       // Close the cache.
       cache.close();
 
-      List<ControlData> newControlData = new ArrayList<ControlData>();
+      List<ControlData> newControlData = new ArrayList<>();
       Object[] newIdEqualsThree = null;
       int j = 0;
       for (int i = 0; i < data.size(); i++) {

Modified: lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestURLDataSource.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestURLDataSource.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestURLDataSource.java (original)
+++ lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestURLDataSource.java Wed Mar 12 14:39:17 2014
@@ -25,7 +25,7 @@ import java.util.Properties;
 import org.junit.Test;
 
 public class TestURLDataSource extends AbstractDataImportHandlerTestCase {
-  private List<Map<String, String>> fields = new ArrayList<Map<String, String>>();
+  private List<Map<String, String>> fields = new ArrayList<>();
   private URLDataSource dataSource = new URLDataSource();
   private VariableResolver variableResolver = new VariableResolver();
   private Context context = AbstractDataImportHandlerTestCase.getContext(null, variableResolver,

Modified: lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestVariableResolver.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestVariableResolver.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestVariableResolver.java (original)
+++ lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestVariableResolver.java Wed Mar 12 14:39:17 2014
@@ -36,7 +36,7 @@ public class TestVariableResolver extend
   @Test
   public void testSimpleNamespace() {
     VariableResolver vri = new VariableResolver();
-    Map<String,Object> ns = new HashMap<String,Object>();
+    Map<String,Object> ns = new HashMap<>();
     ns.put("world", "WORLD");
     vri.addNamespace("hello", ns);
     assertEquals("WORLD", vri.resolve("hello.world"));
@@ -61,10 +61,10 @@ public class TestVariableResolver extend
   @Test
   public void testNestedNamespace() {
     VariableResolver vri = new VariableResolver();
-    Map<String,Object> ns = new HashMap<String,Object>();
+    Map<String,Object> ns = new HashMap<>();
     ns.put("world", "WORLD");
     vri.addNamespace("hello", ns);
-    ns = new HashMap<String,Object>();
+    ns = new HashMap<>();
     ns.put("world1", "WORLD1");
     vri.addNamespace("hello.my", ns);
     assertEquals("WORLD1", vri.resolve("hello.my.world1"));
@@ -73,10 +73,10 @@ public class TestVariableResolver extend
   @Test
   public void test3LevelNestedNamespace() {
     VariableResolver vri = new VariableResolver();
-    Map<String,Object> ns = new HashMap<String,Object>();
+    Map<String,Object> ns = new HashMap<>();
     ns.put("world", "WORLD");
     vri.addNamespace("hello", ns);
-    ns = new HashMap<String,Object>();
+    ns = new HashMap<>();
     ns.put("world1", "WORLD1");
     vri.addNamespace("hello.my.new", ns);
     assertEquals("WORLD1", vri.resolve("hello.my.new.world1"));
@@ -87,7 +87,7 @@ public class TestVariableResolver extend
     VariableResolver vri = new VariableResolver();
     vri.setEvaluators(new DataImporter().getEvaluators(Collections
         .<Map<String,String>> emptyList()));
-    Map<String,Object> ns = new HashMap<String,Object>();
+    Map<String,Object> ns = new HashMap<>();
     Date d = new Date();
     ns.put("dt", d);
     vri.addNamespace("A", ns);
@@ -115,7 +115,7 @@ public class TestVariableResolver extend
   @Test
   public void testDefaultNamespace() {
     VariableResolver vri = new VariableResolver();
-    Map<String,Object> ns = new HashMap<String,Object>();
+    Map<String,Object> ns = new HashMap<>();
     ns.put("world", "WORLD");
     vri.addNamespace(null, ns);
     assertEquals("WORLD", vri.resolve("world"));
@@ -124,7 +124,7 @@ public class TestVariableResolver extend
   @Test
   public void testDefaultNamespace1() {
     VariableResolver vri = new VariableResolver();
-    Map<String,Object> ns = new HashMap<String,Object>();
+    Map<String,Object> ns = new HashMap<>();
     ns.put("world", "WORLD");
     vri.addNamespace(null, ns);
     assertEquals("WORLD", vri.resolve("world"));
@@ -133,8 +133,8 @@ public class TestVariableResolver extend
   @Test
   public void testFunctionNamespace1() throws Exception {
     VariableResolver resolver = new VariableResolver();
-    final List<Map<String,String>> l = new ArrayList<Map<String,String>>();
-    Map<String,String> m = new HashMap<String,String>();
+    final List<Map<String,String>> l = new ArrayList<>();
+    Map<String,String> m = new HashMap<>();
     m.put("name", "test");
     m.put("class", E.class.getName());
     l.add(m);

Modified: lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java (original)
+++ lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java Wed Mar 12 14:39:17 2014
@@ -57,7 +57,7 @@ public class TestXPathEntityProcessor ex
             new VariableResolver(), getDataSource(cdData), Context.FULL_DUMP, fields, entityAttrs);
     XPathEntityProcessor xPathEntityProcessor = new XPathEntityProcessor();
     xPathEntityProcessor.init(c);
-    List<Map<String, Object>> result = new ArrayList<Map<String, Object>>();
+    List<Map<String, Object>> result = new ArrayList<>();
     while (true) {
       Map<String, Object> row = xPathEntityProcessor.nextRow();
       if (row == null)
@@ -80,7 +80,7 @@ public class TestXPathEntityProcessor ex
             new VariableResolver(), getDataSource(testXml), Context.FULL_DUMP, fields, entityAttrs);
     XPathEntityProcessor xPathEntityProcessor = new XPathEntityProcessor();
     xPathEntityProcessor.init(c);
-    List<Map<String, Object>> result = new ArrayList<Map<String, Object>>();
+    List<Map<String, Object>> result = new ArrayList<>();
     while (true) {
       Map<String, Object> row = xPathEntityProcessor.nextRow();
       if (row == null)
@@ -109,7 +109,7 @@ public class TestXPathEntityProcessor ex
             new VariableResolver(), getDataSource(textMultipleDocuments), Context.FULL_DUMP, fields, entityAttrs);
     XPathEntityProcessor xPathEntityProcessor = new XPathEntityProcessor();
     xPathEntityProcessor.init(c);
-    List<Map<String, Object>> result = new ArrayList<Map<String, Object>>();
+    List<Map<String, Object>> result = new ArrayList<>();
     while (true) {
       Map<String, Object> row = xPathEntityProcessor.nextRow();
       if (row == null)
@@ -276,7 +276,7 @@ public class TestXPathEntityProcessor ex
     xPathEntityProcessor.blockingQueueTimeOutUnits = TimeUnit.MICROSECONDS;
     
     xPathEntityProcessor.init(c);
-    List<Map<String, Object>> result = new ArrayList<Map<String, Object>>();
+    List<Map<String, Object>> result = new ArrayList<>();
     while (true) {
       if (rowsToRead >= 0 && result.size() >= rowsToRead) {
         Thread.currentThread().interrupt();
@@ -346,7 +346,7 @@ public class TestXPathEntityProcessor ex
             new VariableResolver(), getDataSource(cdData), Context.FULL_DUMP, null, entityAttrs);
     XPathEntityProcessor xPathEntityProcessor = new XPathEntityProcessor();
     xPathEntityProcessor.init(c);
-    List<Map<String, Object>> result = new ArrayList<Map<String, Object>>();
+    List<Map<String, Object>> result = new ArrayList<>();
     while (true) {
       Map<String, Object> row = xPathEntityProcessor.nextRow();
       if (row == null)

Modified: lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathRecordReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathRecordReader.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathRecordReader.java (original)
+++ lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathRecordReader.java Wed Mar 12 14:39:17 2014
@@ -136,8 +136,8 @@ public class TestXPathRecordReader exten
     rr.addField("a", "/root/x/b/@a", false);
     rr.addField("b", "/root/x/b/@b", false);
 
-    final List<Map<String, Object>> a = new ArrayList<Map<String, Object>>();
-    final List<Map<String, Object>> x = new ArrayList<Map<String, Object>>();
+    final List<Map<String, Object>> a = new ArrayList<>();
+    final List<Map<String, Object>> x = new ArrayList<>();
     rr.streamRecords(new StringReader(xml), new XPathRecordReader.Handler() {
       @Override
       public void handle(Map<String, Object> record, String xpath) {

Modified: lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestZKPropertiesWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestZKPropertiesWriter.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestZKPropertiesWriter.java (original)
+++ lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestZKPropertiesWriter.java Wed Mar 12 14:39:17 2014
@@ -106,11 +106,11 @@ public class TestZKPropertiesWriter exte
     SimpleDateFormat df = new SimpleDateFormat(dateFormat, Locale.ROOT);
     Date oneSecondAgo = new Date(System.currentTimeMillis() - 1000);
 
-    Map<String, String> init = new HashMap<String, String>();
+    Map<String, String> init = new HashMap<>();
     init.put("dateFormat", dateFormat);
     ZKPropertiesWriter spw = new ZKPropertiesWriter();
     spw.init(new DataImporter(h.getCore(), "dataimport"), init);
-    Map<String, Object> props = new HashMap<String, Object>();
+    Map<String, Object> props = new HashMap<>();
     props.put("SomeDates.last_index_time", oneSecondAgo);
     props.put("last_index_time", oneSecondAgo);
     spw.persist(props);

Modified: lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TripleThreatTransformer.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TripleThreatTransformer.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TripleThreatTransformer.java (original)
+++ lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TripleThreatTransformer.java Wed Mar 12 14:39:17 2014
@@ -36,17 +36,17 @@ import java.util.Map;
  */
 public class TripleThreatTransformer {
   public Object transformRow(Map<String, Object> row) {
-    List<Map<String, Object>> rows = new ArrayList<Map<String, Object>>(3);
+    List<Map<String, Object>> rows = new ArrayList<>(3);
     rows.add(row);
     rows.add(addDuplicateBackwardsValues(row));
-    rows.add(new LinkedHashMap<String,Object>(row));
+    rows.add(new LinkedHashMap<>(row));
     rows.get(2).put("AddAColumn_s", "Added");
     modifyIdColumn(rows.get(1), 1);
     modifyIdColumn(rows.get(2), 2);
     return rows;
   }
   private LinkedHashMap<String,Object> addDuplicateBackwardsValues(Map<String, Object> row) {
-    LinkedHashMap<String,Object> n = new LinkedHashMap<String,Object>();
+    LinkedHashMap<String,Object> n = new LinkedHashMap<>();
     for(Map.Entry<String,Object> entry : row.entrySet()) {
       String key = entry.getKey();
       if(!"id".equalsIgnoreCase(key)) {

Modified: lucene/dev/trunk/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingRequestHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingRequestHandler.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingRequestHandler.java (original)
+++ lucene/dev/trunk/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingRequestHandler.java Wed Mar 12 14:39:17 2014
@@ -82,7 +82,7 @@ public class ExtractingRequestHandler ex
       }
       NamedList configDateFormats = (NamedList) initArgs.get(DATE_FORMATS);
       if (configDateFormats != null && configDateFormats.size() > 0) {
-        dateFormats = new HashSet<String>();
+        dateFormats = new HashSet<>();
         Iterator<Map.Entry> it = configDateFormats.iterator();
         while (it.hasNext()) {
           String format = (String) it.next().getValue();

Modified: lucene/dev/trunk/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/RegexRulesPasswordProvider.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/RegexRulesPasswordProvider.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/RegexRulesPasswordProvider.java (original)
+++ lucene/dev/trunk/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/RegexRulesPasswordProvider.java Wed Mar 12 14:39:17 2014
@@ -41,7 +41,7 @@ import org.slf4j.LoggerFactory;
 public class RegexRulesPasswordProvider implements PasswordProvider {
   private static final Logger log = LoggerFactory.getLogger(RegexRulesPasswordProvider.class);
   
-  private LinkedHashMap<Pattern,String> passwordMap = new LinkedHashMap<Pattern,String>(); 
+  private LinkedHashMap<Pattern,String> passwordMap = new LinkedHashMap<>();
   private String explicitPassword; 
   
   @Override
@@ -72,7 +72,7 @@ public class RegexRulesPasswordProvider 
    * @param is input stream for the file
    */
   public static LinkedHashMap<Pattern,String> parseRulesFile(InputStream is) {
-    LinkedHashMap<Pattern,String> rules = new LinkedHashMap<Pattern,String>();
+    LinkedHashMap<Pattern,String> rules = new LinkedHashMap<>();
     BufferedReader br = new BufferedReader(IOUtils.getDecodingReader(is, IOUtils.CHARSET_UTF_8));
     String line;
     try {

Modified: lucene/dev/trunk/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/SolrContentHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/SolrContentHandler.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/SolrContentHandler.java (original)
+++ lucene/dev/trunk/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/SolrContentHandler.java Wed Mar 12 14:39:17 2014
@@ -57,7 +57,7 @@ public class SolrContentHandler extends 
   protected StringBuilder catchAllBuilder = new StringBuilder(2048);
   protected IndexSchema schema;
   protected Map<String, StringBuilder> fieldBuilders = Collections.emptyMap();
-  private LinkedList<StringBuilder> bldrStack = new LinkedList<StringBuilder>();
+  private LinkedList<StringBuilder> bldrStack = new LinkedList<>();
 
   protected boolean captureAttribs;
   protected boolean lowerNames;
@@ -89,7 +89,7 @@ public class SolrContentHandler extends 
     this.defaultField = params.get(DEFAULT_FIELD, "");
     String[] captureFields = params.getParams(CAPTURE_ELEMENTS);
     if (captureFields != null && captureFields.length > 0) {
-      fieldBuilders = new HashMap<String, StringBuilder>();
+      fieldBuilders = new HashMap<>();
       for (int i = 0; i < captureFields.length; i++) {
         fieldBuilders.put(captureFields[i], new StringBuilder());
       }
@@ -158,7 +158,7 @@ public class SolrContentHandler extends 
    */
   protected void addLiterals() {
     Iterator<String> paramNames = params.getParameterNamesIterator();
-    literalFieldNames = new HashSet<String>();
+    literalFieldNames = new HashSet<>();
     while (paramNames.hasNext()) {
       String pname = paramNames.next();
       if (!pname.startsWith(LITERALS_PREFIX)) continue;

Modified: lucene/dev/trunk/solr/contrib/extraction/src/test/org/apache/solr/handler/extraction/ExtractingRequestHandlerTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/extraction/src/test/org/apache/solr/handler/extraction/ExtractingRequestHandlerTest.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/extraction/src/test/org/apache/solr/handler/extraction/ExtractingRequestHandlerTest.java (original)
+++ lucene/dev/trunk/solr/contrib/extraction/src/test/org/apache/solr/handler/extraction/ExtractingRequestHandlerTest.java Wed Mar 12 14:39:17 2014
@@ -616,7 +616,7 @@ public class ExtractingRequestHandlerTes
     try {
       // TODO: stop using locally defined streams once stream.file and
       // stream.body work everywhere
-      List<ContentStream> cs = new ArrayList<ContentStream>();
+      List<ContentStream> cs = new ArrayList<>();
       cs.add(new ContentStreamBase.FileStream(getFile(filename)));
       req.setContentStreams(cs);
       return h.queryAndResponse("/update/extract", req);

Modified: lucene/dev/trunk/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessor.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessor.java (original)
+++ lucene/dev/trunk/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessor.java Wed Mar 12 14:39:17 2014
@@ -53,7 +53,7 @@ public class LangDetectLanguageIdentifie
       Detector detector = DetectorFactory.create();
       detector.append(content);
       ArrayList<Language> langlist = detector.getProbabilities();
-      ArrayList<DetectedLanguage> solrLangList = new ArrayList<DetectedLanguage>();
+      ArrayList<DetectedLanguage> solrLangList = new ArrayList<>();
       for (Language l: langlist) {
         solrLangList.add(new DetectedLanguage(l.lang, l.prob));
       }

Modified: lucene/dev/trunk/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessorFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessorFactory.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessorFactory.java (original)
+++ lucene/dev/trunk/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessorFactory.java Wed Mar 12 14:39:17 2014
@@ -125,7 +125,7 @@ public class LangDetectLanguageIdentifie
       return;
     }
     loaded = true;
-    List<String> profileData = new ArrayList<String>();
+    List<String> profileData = new ArrayList<>();
     Charset encoding = Charset.forName("UTF-8");
     for (String language : languages) {
       InputStream stream = LangDetectLanguageIdentifierUpdateProcessor.class.getResourceAsStream("langdetect-profiles/" + language);

Modified: lucene/dev/trunk/solr/contrib/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java (original)
+++ lucene/dev/trunk/solr/contrib/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java Wed Mar 12 14:39:17 2014
@@ -107,7 +107,7 @@ public abstract class LanguageIdentifier
         fallbackFields = params.get(FALLBACK_FIELDS).split(",");
       }
       overwrite = params.getBool(OVERWRITE, false);
-      langWhitelist = new HashSet<String>();
+      langWhitelist = new HashSet<>();
       threshold = params.getDouble(THRESHOLD, DOCID_THRESHOLD_DEFAULT);
       if(params.get(LANG_WHITELIST, "").length() > 0) {
         for(String lang : params.get(LANG_WHITELIST, "").split(",")) {
@@ -133,15 +133,15 @@ public abstract class LanguageIdentifier
       } else {
         mapIndividualFields = mapFields;
       }
-      mapIndividualFieldsSet = new HashSet<String>(Arrays.asList(mapIndividualFields));
+      mapIndividualFieldsSet = new HashSet<>(Arrays.asList(mapIndividualFields));
       // Compile a union of the lists of fields to map
-      allMapFieldsSet = new HashSet<String>(Arrays.asList(mapFields));
+      allMapFieldsSet = new HashSet<>(Arrays.asList(mapFields));
       if(Arrays.equals(mapFields, mapIndividualFields)) {
         allMapFieldsSet.addAll(mapIndividualFieldsSet);
       }
 
       // Normalize detected langcode onto normalized langcode
-      lcMap = new HashMap<String,String>();
+      lcMap = new HashMap<>();
       if(params.get(LCMAP) != null) {
         for(String mapping : params.get(LCMAP).split("[, ]")) {
           String[] keyVal = mapping.split(":");
@@ -154,7 +154,7 @@ public abstract class LanguageIdentifier
       }
 
       // Language Code mapping
-      mapLcMap = new HashMap<String,String>();
+      mapLcMap = new HashMap<>();
       if(params.get(MAP_LCMAP) != null) {
         for(String mapping : params.get(MAP_LCMAP).split("[, ]")) {
           String[] keyVal = mapping.split(":");
@@ -199,7 +199,7 @@ public abstract class LanguageIdentifier
    */
   protected SolrInputDocument process(SolrInputDocument doc) {
     String docLang = null;
-    HashSet<String> docLangs = new HashSet<String>();
+    HashSet<String> docLangs = new HashSet<>();
     String fallbackLang = getFallbackLang(doc, fallbackFields, fallbackValue);
 
     if(langField == null || !doc.containsKey(langField) || (doc.containsKey(langField) && overwrite)) {
@@ -323,7 +323,7 @@ public abstract class LanguageIdentifier
    * @return a string of the chosen language
    */
   protected String resolveLanguage(String language, String fallbackLang) {
-    List<DetectedLanguage> l = new ArrayList<DetectedLanguage>();
+    List<DetectedLanguage> l = new ArrayList<>();
     l.add(new DetectedLanguage(language, 1.0));
     return resolveLanguage(l, fallbackLang);
   }

Modified: lucene/dev/trunk/solr/contrib/langid/src/java/org/apache/solr/update/processor/TikaLanguageIdentifierUpdateProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/langid/src/java/org/apache/solr/update/processor/TikaLanguageIdentifierUpdateProcessor.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/langid/src/java/org/apache/solr/update/processor/TikaLanguageIdentifierUpdateProcessor.java (original)
+++ lucene/dev/trunk/solr/contrib/langid/src/java/org/apache/solr/update/processor/TikaLanguageIdentifierUpdateProcessor.java Wed Mar 12 14:39:17 2014
@@ -41,7 +41,7 @@ public class TikaLanguageIdentifierUpdat
   
   @Override
   protected List<DetectedLanguage> detectLanguage(String content) {
-    List<DetectedLanguage> languages = new ArrayList<DetectedLanguage>();
+    List<DetectedLanguage> languages = new ArrayList<>();
     if(content.trim().length() != 0) { 
       LanguageIdentifier identifier = new LanguageIdentifier(content);
       // FIXME: Hack - we get the distance from toString and calculate our own certainty score

Modified: lucene/dev/trunk/solr/contrib/langid/src/test/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessorFactoryTestCase.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/langid/src/test/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessorFactoryTestCase.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/langid/src/test/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessorFactoryTestCase.java (original)
+++ lucene/dev/trunk/solr/contrib/langid/src/test/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessorFactoryTestCase.java Wed Mar 12 14:39:17 2014
@@ -124,7 +124,7 @@ public abstract class LanguageIdentifier
     assertEquals("zh", liProcessor.resolveLanguage("zh_cn", "NA"));
     assertEquals("zh", liProcessor.resolveLanguage("zh_tw", "NA"));
     assertEquals("no", liProcessor.resolveLanguage("no", "NA"));
-    List<DetectedLanguage> langs = new ArrayList<DetectedLanguage>();
+    List<DetectedLanguage> langs = new ArrayList<>();
     langs.add(new DetectedLanguage("zh_cn", 0.8));
     assertEquals("zh", liProcessor.resolveLanguage(langs, "NA"));
   }
@@ -246,7 +246,7 @@ public abstract class LanguageIdentifier
     liProcessor = createLangIdProcessor(parameters);
 
     // No detected languages
-    langs = new ArrayList<DetectedLanguage>();
+    langs = new ArrayList<>();
     assertEquals("", liProcessor.resolveLanguage(langs, null));
     assertEquals("fallback", liProcessor.resolveLanguage(langs, "fallback"));
 
@@ -255,7 +255,7 @@ public abstract class LanguageIdentifier
     assertEquals("one", liProcessor.resolveLanguage(langs, "fallback"));    
 
     // One detected language under default threshold
-    langs = new ArrayList<DetectedLanguage>();
+    langs = new ArrayList<>();
     langs.add(new DetectedLanguage("under", 0.1));
     assertEquals("fallback", liProcessor.resolveLanguage(langs, "fallback"));    
   }

Modified: lucene/dev/trunk/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/BatchWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/BatchWriter.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/BatchWriter.java (original)
+++ lucene/dev/trunk/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/BatchWriter.java Wed Mar 12 14:39:17 2014
@@ -81,7 +81,7 @@ class BatchWriter {
     private UpdateResponse result;
 
     public Batch(Collection<SolrInputDocument> batch) {
-      documents = new ArrayList<SolrInputDocument>(batch);
+      documents = new ArrayList<>(batch);
     }
 
     public void run() {
@@ -111,7 +111,7 @@ class BatchWriter {
 
     protected void reset(List<SolrInputDocument> documents) {
       if (this.documents == null) {
-        this.documents = new ArrayList<SolrInputDocument>(documents);
+        this.documents = new ArrayList<>(documents);
       } else {
         this.documents.clear();
         this.documents.addAll(documents);
@@ -121,7 +121,7 @@ class BatchWriter {
 
     protected void reset(SolrInputDocument document) {
       if (this.documents == null) {
-        this.documents = new ArrayList<SolrInputDocument>();
+        this.documents = new ArrayList<>();
       } else {
         this.documents.clear();
       }

Modified: lucene/dev/trunk/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/GoLive.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/GoLive.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/GoLive.java (original)
+++ lucene/dev/trunk/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/GoLive.java Wed Mar 12 14:39:17 2014
@@ -59,8 +59,8 @@ class GoLive {
         new LinkedBlockingQueue<Runnable>());
     
     try {
-      CompletionService<Request> completionService = new ExecutorCompletionService<Request>(executor);
-      Set<Future<Request>> pending = new HashSet<Future<Request>>();
+      CompletionService<Request> completionService = new ExecutorCompletionService<>(executor);
+      Set<Future<Request>> pending = new HashSet<>();
       int cnt = -1;
       for (final FileStatus dir : outDirs) {
         

Modified: lucene/dev/trunk/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/MapReduceIndexerTool.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/MapReduceIndexerTool.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/MapReduceIndexerTool.java (original)
+++ lucene/dev/trunk/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/MapReduceIndexerTool.java Wed Mar 12 14:39:17 2014
@@ -541,7 +541,7 @@ public class MapReduceIndexerTool extend
 
   static List<List<String>> buildShardUrls(List<Object> urls, Integer numShards) {
     if (urls == null) return null;
-    List<List<String>> shardUrls = new ArrayList<List<String>>(urls.size());
+    List<List<String>> shardUrls = new ArrayList<>(urls.size());
     List<String> list = null;
     
     int sz;
@@ -551,7 +551,7 @@ public class MapReduceIndexerTool extend
     sz = (int) Math.ceil(urls.size() / (float)numShards);
     for (int i = 0; i < urls.size(); i++) {
       if (i % sz == 0) {
-        list = new ArrayList<String>();
+        list = new ArrayList<>();
         shardUrls.add(list);
       }
       list.add((String) urls.get(i));

Modified: lucene/dev/trunk/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/SolrOutputFormat.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/SolrOutputFormat.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/SolrOutputFormat.java (original)
+++ lucene/dev/trunk/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/SolrOutputFormat.java Wed Mar 12 14:39:17 2014
@@ -158,7 +158,7 @@ public class SolrOutputFormat<K, V> exte
     Utils.getLogConfigFile(context.getConfiguration());
     Path workDir = getDefaultWorkFile(context, "");
     int batchSize = getBatchSize(context.getConfiguration());
-    return new SolrRecordWriter<K, V>(context, workDir, batchSize);
+    return new SolrRecordWriter<>(context, workDir, batchSize);
   }
 
   public static void setupSolrHomeCache(File solrHomeDir, Job job) throws IOException{
@@ -202,7 +202,7 @@ public class SolrOutputFormat<K, V> exte
   }
 
   private static void createZip(File dir, File out) throws IOException {
-    HashSet<File> files = new HashSet<File>();
+    HashSet<File> files = new HashSet<>();
     // take only conf/ and lib/
     for (String allowedDirectory : SolrRecordWriter
         .getAllowedConfigDirectories()) {

Modified: lucene/dev/trunk/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/SolrRecordWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/SolrRecordWriter.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/SolrRecordWriter.java (original)
+++ lucene/dev/trunk/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/SolrRecordWriter.java Wed Mar 12 14:39:17 2014
@@ -53,10 +53,10 @@ class SolrRecordWriter<K, V> extends Rec
   
   private static final Logger LOG = LoggerFactory.getLogger(SolrRecordWriter.class);
 
-  public final static List<String> allowedConfigDirectories = new ArrayList<String>(
+  public final static List<String> allowedConfigDirectories = new ArrayList<>(
       Arrays.asList(new String[] { "conf", "lib", "solr.xml" }));
 
-  public final static Set<String> requiredConfigDirectories = new HashSet<String>();
+  public final static Set<String> requiredConfigDirectories = new HashSet<>();
   
   static {
     requiredConfigDirectories.add("conf");
@@ -101,7 +101,7 @@ class SolrRecordWriter<K, V> extends Rec
   private long numDocsWritten = 0;
   private long nextLogTime = System.nanoTime();
 
-  private static HashMap<TaskID, Reducer<?,?,?,?>.Context> contextMap = new HashMap<TaskID, Reducer<?,?,?,?>.Context>();
+  private static HashMap<TaskID, Reducer<?,?,?,?>.Context> contextMap = new HashMap<>();
   
   public SolrRecordWriter(TaskAttemptContext context, Path outputShardDir, int batchSize) {
     this.batchSize = batchSize;

Modified: lucene/dev/trunk/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/ZooKeeperInspector.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/ZooKeeperInspector.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/ZooKeeperInspector.java (original)
+++ lucene/dev/trunk/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/ZooKeeperInspector.java Wed Mar 12 14:39:17 2014
@@ -55,14 +55,14 @@ final class ZooKeeperInspector {
 
     DocCollection docCollection = extractDocCollection(zkHost, collection);
     List<Slice> slices = getSortedSlices(docCollection.getSlices());
-    List<List<String>> solrUrls = new ArrayList<List<String>>(slices.size());
+    List<List<String>> solrUrls = new ArrayList<>(slices.size());
     for (Slice slice : slices) {
       if (slice.getLeader() == null) {
         throw new IllegalArgumentException("Cannot find SolrCloud slice leader. " +
             "It looks like not all of your shards are registered in ZooKeeper yet");
       }
       Collection<Replica> replicas = slice.getReplicas();
-      List<String> urls = new ArrayList<String>(replicas.size());
+      List<String> urls = new ArrayList<>(replicas.size());
       for (Replica replica : replicas) {
         ZkCoreNodeProps props = new ZkCoreNodeProps(replica);
         urls.add(props.getCoreUrl());

Modified: lucene/dev/trunk/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/LineRandomizerMapperReducerTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/LineRandomizerMapperReducerTest.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/LineRandomizerMapperReducerTest.java (original)
+++ lucene/dev/trunk/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/LineRandomizerMapperReducerTest.java Wed Mar 12 14:39:17 2014
@@ -49,12 +49,12 @@ public class LineRandomizerMapperReducer
   @Test
   public void testMapReduce2Items() throws IOException {
     mapReduceDriver.withAll(Arrays.asList(
-        new Pair<LongWritable, Text>(new LongWritable(0), new Text("hello")),
-        new Pair<LongWritable, Text>(new LongWritable(1), new Text("world"))
+        new Pair<>(new LongWritable(0), new Text("hello")),
+        new Pair<>(new LongWritable(1), new Text("world"))
         ));
     mapReduceDriver.withAllOutput(Arrays.asList(
-        new Pair<Text, NullWritable>(new Text("world"), NullWritable.get()),
-        new Pair<Text, NullWritable>(new Text("hello"), NullWritable.get())
+        new Pair<>(new Text("world"), NullWritable.get()),
+        new Pair<>(new Text("hello"), NullWritable.get())
         ));
     mapReduceDriver.runTest();
   }
@@ -62,14 +62,14 @@ public class LineRandomizerMapperReducer
   @Test
   public void testMapReduce3Items() throws IOException {
     mapReduceDriver.withAll(Arrays.asList(
-        new Pair<LongWritable, Text>(new LongWritable(0), new Text("hello")),
-        new Pair<LongWritable, Text>(new LongWritable(1), new Text("world")),
-        new Pair<LongWritable, Text>(new LongWritable(2), new Text("nadja"))
+        new Pair<>(new LongWritable(0), new Text("hello")),
+        new Pair<>(new LongWritable(1), new Text("world")),
+        new Pair<>(new LongWritable(2), new Text("nadja"))
         ));
     mapReduceDriver.withAllOutput(Arrays.asList(
-        new Pair<Text, NullWritable>(new Text("nadja"), NullWritable.get()),
-        new Pair<Text, NullWritable>(new Text("world"), NullWritable.get()),
-        new Pair<Text, NullWritable>(new Text("hello"), NullWritable.get())
+        new Pair<>(new Text("nadja"), NullWritable.get()),
+        new Pair<>(new Text("world"), NullWritable.get()),
+        new Pair<>(new Text("hello"), NullWritable.get())
         ));
     mapReduceDriver.runTest();
   }
@@ -77,16 +77,16 @@ public class LineRandomizerMapperReducer
   @Test
   public void testMapReduce4Items() throws IOException {
     mapReduceDriver.withAll(Arrays.asList(
-        new Pair<LongWritable, Text>(new LongWritable(0), new Text("hello")),
-        new Pair<LongWritable, Text>(new LongWritable(1), new Text("world")),
-        new Pair<LongWritable, Text>(new LongWritable(2), new Text("nadja")),
-        new Pair<LongWritable, Text>(new LongWritable(3), new Text("basti"))
+        new Pair<>(new LongWritable(0), new Text("hello")),
+        new Pair<>(new LongWritable(1), new Text("world")),
+        new Pair<>(new LongWritable(2), new Text("nadja")),
+        new Pair<>(new LongWritable(3), new Text("basti"))
         ));
     mapReduceDriver.withAllOutput(Arrays.asList(
-        new Pair<Text, NullWritable>(new Text("nadja"), NullWritable.get()),
-        new Pair<Text, NullWritable>(new Text("world"), NullWritable.get()),
-        new Pair<Text, NullWritable>(new Text("basti"), NullWritable.get()),
-        new Pair<Text, NullWritable>(new Text("hello"), NullWritable.get())
+        new Pair<>(new Text("nadja"), NullWritable.get()),
+        new Pair<>(new Text("world"), NullWritable.get()),
+        new Pair<>(new Text("basti"), NullWritable.get()),
+        new Pair<>(new Text("hello"), NullWritable.get())
         ));
     mapReduceDriver.runTest();
   }

Modified: lucene/dev/trunk/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineGoLiveMiniMRTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineGoLiveMiniMRTest.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineGoLiveMiniMRTest.java (original)
+++ lucene/dev/trunk/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineGoLiveMiniMRTest.java Wed Mar 12 14:39:17 2014
@@ -245,7 +245,7 @@ public class MorphlineGoLiveMiniMRTest e
   public void testBuildShardUrls() throws Exception {
     // 2x3
     Integer numShards = 2;
-    List<Object> urls = new ArrayList<Object>();
+    List<Object> urls = new ArrayList<>();
     urls.add("shard1");
     urls.add("shard2");
     urls.add("shard3");
@@ -303,7 +303,7 @@ public class MorphlineGoLiveMiniMRTest e
     // null shards 3x1
     numShards = null;
     
-    urls = new ArrayList<Object>();
+    urls = new ArrayList<>();
     urls.add("shard1");
     urls.add("shard2");
     urls.add("shard3");
@@ -318,7 +318,7 @@ public class MorphlineGoLiveMiniMRTest e
     
     // 2x(2,3) off balance
     numShards = 2;
-    urls = new ArrayList<Object>();
+    urls = new ArrayList<>();
     urls.add("shard1");
     urls.add("shard2");
     urls.add("shard3");
@@ -328,7 +328,7 @@ public class MorphlineGoLiveMiniMRTest e
 
     assertEquals(shardUrls.toString(), 2, shardUrls.size());
     
-    Set<Integer> counts = new HashSet<Integer>();
+    Set<Integer> counts = new HashSet<>();
     counts.add(shardUrls.get(0).size());
     counts.add(shardUrls.get(1).size());
     
@@ -388,7 +388,7 @@ public class MorphlineGoLiveMiniMRTest e
         "--go-live"
     };
     args = prependInitialArgs(args);
-    List<String> argList = new ArrayList<String>();
+    List<String> argList = new ArrayList<>();
     getShardUrlArgs(argList);
     args = concat(args, argList.toArray(new String[0]));
     
@@ -418,7 +418,7 @@ public class MorphlineGoLiveMiniMRTest e
         "--go-live-threads", Integer.toString(random().nextInt(15) + 1)
     };
     args = prependInitialArgs(args);
-    argList = new ArrayList<String>();
+    argList = new ArrayList<>();
     getShardUrlArgs(argList);
     args = concat(args, argList.toArray(new String[0]));
     
@@ -608,7 +608,7 @@ public class MorphlineGoLiveMiniMRTest e
     };
     args = prependInitialArgs(args);
 
-    argList = new ArrayList<String>();
+    argList = new ArrayList<>();
     getShardUrlArgs(argList, replicatedCollection);
     args = concat(args, argList.toArray(new String[0]));
     

Modified: lucene/dev/trunk/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineReducerTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineReducerTest.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineReducerTest.java (original)
+++ lucene/dev/trunk/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineReducerTest.java Wed Mar 12 14:39:17 2014
@@ -106,7 +106,7 @@ public class MorphlineReducerTest extend
       Configuration config = reduceDriver.getConfiguration();
       setupHadoopConfig(config);
       
-      List<SolrInputDocumentWritable> values = new ArrayList<SolrInputDocumentWritable>();
+      List<SolrInputDocumentWritable> values = new ArrayList<>();
       SolrInputDocument sid = new SolrInputDocument();
       String id = "myid1";
       sid.addField("id", id);

Modified: lucene/dev/trunk/solr/contrib/morphlines-cell/src/java/org/apache/solr/morphlines/cell/SolrCellBuilder.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/morphlines-cell/src/java/org/apache/solr/morphlines/cell/SolrCellBuilder.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/morphlines-cell/src/java/org/apache/solr/morphlines/cell/SolrCellBuilder.java (original)
+++ lucene/dev/trunk/solr/contrib/morphlines-cell/src/java/org/apache/solr/morphlines/cell/SolrCellBuilder.java Wed Mar 12 14:39:17 2014
@@ -151,7 +151,7 @@ public final class SolrCellBuilder imple
         cellParams.put(ExtractingParams.XPATH_EXPRESSION, xpathExpr);
       }
       
-      this.dateFormats = getConfigs().getStringList(config, "dateFormats", new ArrayList<String>(DateUtil.DEFAULT_DATE_FORMATS));
+      this.dateFormats = getConfigs().getStringList(config, "dateFormats", new ArrayList<>(DateUtil.DEFAULT_DATE_FORMATS));
       
       String handlerStr = getConfigs().getString(config, "solrContentHandlerFactory", TrimSolrContentHandlerFactory.class.getName());
       Class<? extends SolrContentHandlerFactory> factoryClass;
@@ -165,7 +165,7 @@ public final class SolrCellBuilder imple
 
       this.locale = getLocale(getConfigs().getString(config, "locale", ""));
       
-      this.mediaTypeToParserMap = new HashMap<MediaType, Parser>();
+      this.mediaTypeToParserMap = new HashMap<>();
       //MimeTypes mimeTypes = MimeTypes.getDefaultMimeTypes(); // FIXME getMediaTypeRegistry.normalize() 
 
       List<? extends Config> parserConfigs = getConfigs().getConfigList(config, "parsers");

Modified: lucene/dev/trunk/solr/contrib/morphlines-cell/src/test/org/apache/solr/morphlines/cell/SolrCellMorphlineTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/morphlines-cell/src/test/org/apache/solr/morphlines/cell/SolrCellMorphlineTest.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/morphlines-cell/src/test/org/apache/solr/morphlines/cell/SolrCellMorphlineTest.java (original)
+++ lucene/dev/trunk/solr/contrib/morphlines-cell/src/test/org/apache/solr/morphlines/cell/SolrCellMorphlineTest.java Wed Mar 12 14:39:17 2014
@@ -37,8 +37,8 @@ import org.junit.Test;
 
 public class SolrCellMorphlineTest extends AbstractSolrMorphlineTestBase {
 
-  private Map<String,Integer> expectedRecords = new HashMap<String,Integer>();
-  private Map<String, Map<String, Object>> expectedRecordContents = new HashMap<String, Map<String, Object>>();
+  private Map<String,Integer> expectedRecords = new HashMap<>();
+  private Map<String, Map<String, Object>> expectedRecordContents = new HashMap<>();
   @BeforeClass
   public static void beforeClass2() {
     assumeFalse("FIXME: Morphlines currently has issues with Windows paths", Constants.WINDOWS);

Modified: lucene/dev/trunk/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/CollectingDocumentLoader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/CollectingDocumentLoader.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/CollectingDocumentLoader.java (original)
+++ lucene/dev/trunk/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/CollectingDocumentLoader.java Wed Mar 12 14:39:17 2014
@@ -31,8 +31,8 @@ import org.slf4j.LoggerFactory;
 class CollectingDocumentLoader implements DocumentLoader {
 
   private final int batchSize;
-  private final List<SolrInputDocument> batch = new ArrayList<SolrInputDocument> ();
-  private List<SolrInputDocument> results = new ArrayList<SolrInputDocument> ();
+  private final List<SolrInputDocument> batch = new ArrayList<> ();
+  private List<SolrInputDocument> results = new ArrayList<> ();
 
   private static final Logger LOGGER = LoggerFactory.getLogger(CollectingDocumentLoader.class);
 

Modified: lucene/dev/trunk/solr/contrib/uima/src/java/org/apache/solr/uima/processor/SolrUIMAConfigurationReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/uima/src/java/org/apache/solr/uima/processor/SolrUIMAConfigurationReader.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/uima/src/java/org/apache/solr/uima/processor/SolrUIMAConfigurationReader.java (original)
+++ lucene/dev/trunk/solr/contrib/uima/src/java/org/apache/solr/uima/processor/SolrUIMAConfigurationReader.java Wed Mar 12 14:39:17 2014
@@ -65,7 +65,7 @@ public class SolrUIMAConfigurationReader
 
   @SuppressWarnings("rawtypes")
   private Map<String, Map<String, MapField>> readTypesFeaturesFieldsMapping() {
-    Map<String, Map<String, MapField>> map = new HashMap<String, Map<String, MapField>>();
+    Map<String, Map<String, MapField>> map = new HashMap<>();
 
     NamedList fieldMappings = (NamedList) args.get("fieldMappings");
     /* iterate over UIMA types */
@@ -73,7 +73,7 @@ public class SolrUIMAConfigurationReader
       NamedList type = (NamedList) fieldMappings.get("type", i);
       String typeName = (String)type.get("name");
 
-      Map<String, MapField> subMap = new HashMap<String, MapField>();
+      Map<String, MapField> subMap = new HashMap<>();
       /* iterate over mapping definitions */
       for(int j = 0; j < type.size() - 1; j++){
         NamedList mapping = (NamedList) type.get("mapping", j + 1);
@@ -96,7 +96,7 @@ public class SolrUIMAConfigurationReader
 
   @SuppressWarnings("rawtypes")
   private Map<String, Object> readAEOverridingParameters() {
-    Map<String, Object> runtimeParameters = new HashMap<String, Object>();
+    Map<String, Object> runtimeParameters = new HashMap<>();
     NamedList runtimeParams = (NamedList) args.get("runtimeParameters");
     for (int i = 0; i < runtimeParams.size(); i++) {
       String name = runtimeParams.getName(i);

Modified: lucene/dev/trunk/solr/contrib/uima/src/test/org/apache/solr/uima/processor/UIMAUpdateRequestProcessorTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/uima/src/test/org/apache/solr/uima/processor/UIMAUpdateRequestProcessorTest.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/uima/src/test/org/apache/solr/uima/processor/UIMAUpdateRequestProcessorTest.java (original)
+++ lucene/dev/trunk/solr/contrib/uima/src/test/org/apache/solr/uima/processor/UIMAUpdateRequestProcessorTest.java Wed Mar 12 14:39:17 2014
@@ -192,7 +192,7 @@ public class UIMAUpdateRequestProcessorT
   }
 
   private void addDoc(String chain, String doc) throws Exception {
-    Map<String, String[]> params = new HashMap<String, String[]>();
+    Map<String, String[]> params = new HashMap<>();
     params.put(UpdateParams.UPDATE_CHAIN, new String[] { chain });
     MultiMapSolrParams mmparams = new MultiMapSolrParams(params);
     SolrQueryRequestBase req = new SolrQueryRequestBase(h.getCore(), (SolrParams) mmparams) {
@@ -200,7 +200,7 @@ public class UIMAUpdateRequestProcessorT
 
     UpdateRequestHandler handler = new UpdateRequestHandler();
     handler.init(null);
-    ArrayList<ContentStream> streams = new ArrayList<ContentStream>(2);
+    ArrayList<ContentStream> streams = new ArrayList<>(2);
     streams.add(new ContentStreamBase.StringStream(doc));
     req.setContentStreams(streams);
     handler.handleRequestBody(req, new SolrQueryResponse());

Modified: lucene/dev/trunk/solr/contrib/velocity/src/java/org/apache/solr/response/SolrParamResourceLoader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/contrib/velocity/src/java/org/apache/solr/response/SolrParamResourceLoader.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/contrib/velocity/src/java/org/apache/solr/response/SolrParamResourceLoader.java (original)
+++ lucene/dev/trunk/solr/contrib/velocity/src/java/org/apache/solr/response/SolrParamResourceLoader.java Wed Mar 12 14:39:17 2014
@@ -31,7 +31,7 @@ import java.util.Iterator;
 import java.util.Map;
 
 public class SolrParamResourceLoader extends ResourceLoader {
-  private Map<String,String> templates = new HashMap<String,String>();
+  private Map<String,String> templates = new HashMap<>();
   public SolrParamResourceLoader(SolrQueryRequest request) {
     super();
 

Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/SolrLogFormatter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/SolrLogFormatter.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/SolrLogFormatter.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/SolrLogFormatter.java Wed Mar 12 14:39:17 2014
@@ -47,7 +47,7 @@ public class SolrLogFormatter extends Fo
 
   long startTime = System.currentTimeMillis();
   long lastTime = startTime;
-  Map<Method, String> methodAlias = new HashMap<Method, String>();
+  Map<Method, String> methodAlias = new HashMap<>();
   
   public static class Method {
     public String className;
@@ -106,9 +106,9 @@ public class SolrLogFormatter extends Fo
     Map<String, Object> coreProps;
   }
 
-  Map<SolrCore, CoreInfo> coreInfoMap = new WeakHashMap<SolrCore, CoreInfo>();    // TODO: use something that survives across a core reload?
+  Map<SolrCore, CoreInfo> coreInfoMap = new WeakHashMap<>();    // TODO: use something that survives across a core reload?
 
-  public Map<String,String> classAliases = new HashMap<String, String>();
+  public Map<String,String> classAliases = new HashMap<>();
 
   @Override
   public String format(LogRecord record) {
@@ -379,7 +379,7 @@ sb.append("(group_name=").append(tg.getN
 
 
 
-  static ThreadLocal<String> threadLocal = new ThreadLocal<String>();
+  static ThreadLocal<String> threadLocal = new ThreadLocal<>();
   
   public static void main(String[] args) throws Exception {
 

Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/analysis/LegacyHTMLStripCharFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/analysis/LegacyHTMLStripCharFilter.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/analysis/LegacyHTMLStripCharFilter.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/analysis/LegacyHTMLStripCharFilter.java Wed Mar 12 14:39:17 2014
@@ -775,7 +775,7 @@ public class LegacyHTMLStripCharFilter e
 
   private static final HashMap<String,Character> entityTable;
   static {
-    entityTable = new HashMap<String,Character>();
+    entityTable = new HashMap<>();
     // entityName and entityVal generated from the python script
     // included in comments at the end of this file.
     final String[] entityName={ "zwnj","aring","gt","yen","ograve","Chi","delta","rang","sup","trade","Ntilde","xi","upsih","nbsp","Atilde","radic","otimes","aelig","oelig","equiv","ni","infin","Psi","auml","cup","Epsilon","otilde","lt","Icirc","Eacute","Lambda","sbquo","Prime","prime","psi","Kappa","rsaquo","Tau","uacute","ocirc","lrm","zwj","cedil","Alpha","not","amp","AElig","oslash","acute","lceil","alefsym","laquo","shy","loz","ge","Igrave","nu","Ograve","lsaquo","sube","euro","rarr","sdot","rdquo","Yacute","lfloor","lArr","Auml","Dagger","brvbar","Otilde","szlig","clubs","diams","agrave","Ocirc","Iota","Theta","Pi","zeta","Scaron","frac14","egrave","sub","iexcl","frac12","ordf","sum","prop","Uuml","ntilde","atilde","asymp","uml","prod","nsub","reg","rArr","Oslash","emsp","THORN","yuml","aacute","Mu","hArr","le","thinsp","dArr","ecirc","bdquo","Sigma","Aring","tilde","nabla","mdash","uarr","times","Ugrave","Eta","Agrave","chi","real","circ","eth","rceil","iuml","gamma","lambda"
 ,"harr","Egrave","frac34","dagger","divide","Ouml","image","ndash","hellip","igrave","Yuml","ang","alpha","frasl","ETH","lowast","Nu","plusmn","bull","sup1","sup2","sup3","Aacute","cent","oline","Beta","perp","Delta","there4","pi","iota","empty","euml","notin","iacute","para","epsilon","weierp","OElig","uuml","larr","icirc","Upsilon","omicron","upsilon","copy","Iuml","Oacute","Xi","kappa","ccedil","Ucirc","cap","mu","scaron","lsquo","isin","Zeta","minus","deg","and","tau","pound","curren","int","ucirc","rfloor","ensp","crarr","ugrave","exist","cong","theta","oplus","permil","Acirc","piv","Euml","Phi","Iacute","quot","Uacute","Omicron","ne","iquest","eta","rsquo","yacute","Rho","darr","Ecirc","Omega","acirc","sim","phi","sigmaf","macr","thetasym","Ccedil","ordm","uArr","forall","beta","fnof","rho","micro","eacute","omega","middot","Gamma","rlm","lang","spades","supe","thorn","ouml","or","raquo","part","sect","ldquo","hearts","sigma","oacute"};

Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/analytics/accumulator/BasicAccumulator.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/analytics/accumulator/BasicAccumulator.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/analytics/accumulator/BasicAccumulator.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/analytics/accumulator/BasicAccumulator.java Wed Mar 12 14:39:17 2014
@@ -103,7 +103,7 @@ public class BasicAccumulator extends Va
   }
   
   public NamedList<?> export(){
-    NamedList<Object> base = new NamedList<Object>();
+    NamedList<Object> base = new NamedList<>();
     for (int count = 0; count < expressions.length; count++) {
       if (!hiddenExpressions.contains(expressionNames[count])) {
         base.add(expressionNames[count], expressions[count].getValue());

Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/analytics/accumulator/FacetingAccumulator.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/analytics/accumulator/FacetingAccumulator.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/analytics/accumulator/FacetingAccumulator.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/analytics/accumulator/FacetingAccumulator.java Wed Mar 12 14:39:17 2014
@@ -98,14 +98,14 @@ public class FacetingAccumulator extends
     List<RangeFacetRequest> rangeFreqs = request.getRangeFacets();
     List<QueryFacetRequest> queryFreqs = request.getQueryFacets();
 
-    this.fieldFacetExpressions = new LinkedHashMap<String,Map<String,Expression[]>>(fieldFreqs.size());
-    this.rangeFacetExpressions = new LinkedHashMap<String,Map<String,Expression[]>>(rangeFreqs.size());
-    this.queryFacetExpressions = new LinkedHashMap<String,Map<String,Expression[]>>(queryFreqs.size());
-    this.fieldFacetCollectors = new LinkedHashMap<String,Map<String,StatsCollector[]>>(fieldFreqs.size());
-    this.rangeFacetCollectors = new LinkedHashMap<String,Map<String,StatsCollector[]>>(rangeFreqs.size());
-    this.queryFacetCollectors = new LinkedHashMap<String,Map<String,StatsCollector[]>>(queryFreqs.size());
-    this.facetAccumulators = new ArrayList<FieldFacetAccumulator>();
-    this.hiddenFieldFacets = new HashSet<String>();
+    this.fieldFacetExpressions = new LinkedHashMap<>(fieldFreqs.size());
+    this.rangeFacetExpressions = new LinkedHashMap<>(rangeFreqs.size());
+    this.queryFacetExpressions = new LinkedHashMap<>(queryFreqs.size());
+    this.fieldFacetCollectors = new LinkedHashMap<>(fieldFreqs.size());
+    this.rangeFacetCollectors = new LinkedHashMap<>(rangeFreqs.size());
+    this.queryFacetCollectors = new LinkedHashMap<>(queryFreqs.size());
+    this.facetAccumulators = new ArrayList<>();
+    this.hiddenFieldFacets = new HashSet<>();
     
     /**
      * For each field facet request add a bucket to the {@link Expression} map and {@link StatsCollector} map.
@@ -130,13 +130,13 @@ public class FacetingAccumulator extends
      * are not created initially.
      */
     for( RangeFacetRequest freq : rangeFreqs ){
-      if( rangeFacets == null ) rangeFacets = new ArrayList<RangeFacetRequest>();
+      if( rangeFacets == null ) rangeFacets = new ArrayList<>();
       rangeFacets.add(freq);
       rangeFacetExpressions.put(freq.getName(), new LinkedHashMap<String,Expression[]>() );
       rangeFacetCollectors.put(freq.getName(), new LinkedHashMap<String,StatsCollector[]>());
     }
     for( QueryFacetRequest freq : queryFreqs ){
-      if( queryFacets == null ) queryFacets = new ArrayList<QueryFacetRequest>();
+      if( queryFacets == null ) queryFacets = new ArrayList<>();
       queryFacets.add(freq);
       queryFacetExpressions.put(freq.getName(), new LinkedHashMap<String,Expression[]>() );
       queryFacetCollectors.put(freq.getName(), new LinkedHashMap<String,StatsCollector[]>());
@@ -442,7 +442,7 @@ public class FacetingAccumulator extends
   @SuppressWarnings("unchecked")
   public NamedList<?> export() {
     final NamedList<Object> base = (NamedList<Object>)super.export();
-    NamedList<NamedList<?>> facetList = new NamedList<NamedList<?>>();
+    NamedList<NamedList<?>> facetList = new NamedList<>();
     
     // Add the field facet buckets to the output
     base.add("fieldFacets",facetList);
@@ -452,7 +452,7 @@ public class FacetingAccumulator extends
         continue;
       }
       final Map<String,Expression[]> buckets = fieldFacetExpressions.get(name);
-      final NamedList<Object> bucketBase = new NamedList<Object>();
+      final NamedList<Object> bucketBase = new NamedList<>();
 
       Iterable<Entry<String,Expression[]>> iter = buckets.entrySet();
       
@@ -471,7 +471,7 @@ public class FacetingAccumulator extends
         final Expression first = buckets.values().iterator().next()[sortPlace];
         final Comparator<Expression> comp = (Comparator<Expression>) first.comparator(sort.getDirection());
         
-        final List<Entry<String,Expression[]>> sorted = new ArrayList<Entry<String,Expression[]>>(buckets.size());
+        final List<Entry<String,Expression[]>> sorted = new ArrayList<>(buckets.size());
         Iterables.addAll(sorted, iter);
         Collections.sort(sorted, new EntryComparator(comp,sortPlace));
         iter = sorted;
@@ -493,12 +493,12 @@ public class FacetingAccumulator extends
     }
 
     // Add the range facet buckets to the output
-    facetList = new NamedList<NamedList<?>>();
+    facetList = new NamedList<>();
     base.add("rangeFacets",facetList);
     for( RangeFacetRequest freq : request.getRangeFacets() ){
       final String name = freq.getName();
       final Map<String,Expression[]> buckets = rangeFacetExpressions.get(name);
-      final NamedList<Object> bucketBase = new NamedList<Object>();
+      final NamedList<Object> bucketBase = new NamedList<>();
 
       Iterable<Entry<String,Expression[]>> iter = buckets.entrySet();
       
@@ -510,12 +510,12 @@ public class FacetingAccumulator extends
     }
     
     // Add the query facet buckets to the output
-    facetList = new NamedList<NamedList<?>>();
+    facetList = new NamedList<>();
     base.add("queryFacets",facetList);
     for( QueryFacetRequest freq : request.getQueryFacets() ){
       final String name = freq.getName();
       final Map<String,Expression[]> buckets = queryFacetExpressions.get(name);
-      final NamedList<Object> bucketBase = new NamedList<Object>();
+      final NamedList<Object> bucketBase = new NamedList<>();
 
       Iterable<Entry<String,Expression[]>> iter = buckets.entrySet();
       
@@ -535,7 +535,7 @@ public class FacetingAccumulator extends
    * @return named list of expressions
    */
   public NamedList<?> export(Expression[] expressionArr) {
-    NamedList<Object> base = new NamedList<Object>();
+    NamedList<Object> base = new NamedList<>();
     for (int count = 0; count < expressionArr.length; count++) {
       if (!hiddenExpressions.contains(expressionNames[count])) {
         base.add(expressionNames[count], expressionArr[count].getValue());

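The BasicAccumulator and FacetingAccumulator hunks above show the same conversion applied to nested type arguments and to constructors that take an initial capacity. A short, hedged sketch of those two variants; the class, method, and key names are illustrative, while NamedList refers to org.apache.solr.common.util.NamedList as used in the hunks.

    import java.util.LinkedHashMap;
    import java.util.Map;

    import org.apache.solr.common.util.NamedList;

    public class NestedDiamondSketch {                  // hypothetical class name
      public NamedList<Object> export(int expectedBuckets) {
        // was: new NamedList<Object>()
        NamedList<Object> base = new NamedList<>();

        // Nested type arguments are inferred as well, and capacity arguments are unaffected.
        // was: new LinkedHashMap<String, Map<String, String>>(expectedBuckets)
        Map<String, Map<String, String>> buckets = new LinkedHashMap<>(expectedBuckets);

        base.add("buckets", buckets);
        return base;
      }
    }
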
Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/analytics/expression/ExpressionFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/analytics/expression/ExpressionFactory.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/analytics/expression/ExpressionFactory.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/analytics/expression/ExpressionFactory.java Wed Mar 12 14:39:17 2014
@@ -158,7 +158,7 @@ public class ExpressionFactory {
     String[] strings = new String[1];
     int stack = 0;
     int start = 0;
-    List<String> arguments = new ArrayList<String>();
+    List<String> arguments = new ArrayList<>();
     char[] chars = expression.toCharArray();
     for (int count = 0; count < expression.length(); count++) {
       char c = chars[count];

Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/analytics/plugin/AnalyticsStatisticsCollector.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/analytics/plugin/AnalyticsStatisticsCollector.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/analytics/plugin/AnalyticsStatisticsCollector.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/analytics/plugin/AnalyticsStatisticsCollector.java Wed Mar 12 14:39:17 2014
@@ -88,7 +88,7 @@ public class AnalyticsStatisticsCollecto
   }
 
   public NamedList<Object> getStatistics() {
-    NamedList<Object> lst = new SimpleOrderedMap<Object>();
+    NamedList<Object> lst = new SimpleOrderedMap<>();
     Snapshot snapshot = requestTimes.getSnapshot();
     lst.add("requests", numRequests.longValue());
     lst.add("analyticsRequests", numAnalyticsRequests.longValue());

Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/analytics/request/AnalyticsContentHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/analytics/request/AnalyticsContentHandler.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/analytics/request/AnalyticsContentHandler.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/analytics/request/AnalyticsContentHandler.java Wed Mar 12 14:39:17 2014
@@ -177,28 +177,28 @@ public class AnalyticsContentHandler imp
           }
           
           // Initiate Range Facet classes
-          gaps = new ArrayList<String>();
+          gaps = new ArrayList<>();
           includeBoundaries = EnumSet.noneOf(FacetRangeInclude.class);
           otherRanges = EnumSet.noneOf(FacetRangeOther.class);
           inRangeFacet = true;
         } else if (localName.equals(QUERY_FACET)) {
           // Start a Query Facet Request
-          queries = new ArrayList<String>();
+          queries = new ArrayList<>();
           inQueryFacet = true;
         }
       } else if (localName.equals(ANALYTICS_REQUEST)){
         // Start an Analytics Request
         
         // Renew each list.
-        fieldFacetList = new ArrayList<FieldFacetRequest>();
-        rangeFacetList = new ArrayList<RangeFacetRequest>();
-        queryFacetList = new ArrayList<QueryFacetRequest>();
-        expressionList = new ArrayList<ExpressionRequest>();
+        fieldFacetList = new ArrayList<>();
+        rangeFacetList = new ArrayList<>();
+        queryFacetList = new ArrayList<>();
+        expressionList = new ArrayList<>();
         inRequest = true;
       }
     } else if (localName.equals(ANALYTICS_REQUEST_ENVELOPE)){
       //Begin the parsing of the Analytics Requests
-      requests = new ArrayList<AnalyticsRequest>();
+      requests = new ArrayList<>();
       inEnvelope = true;
     }
   }

Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/analytics/request/AnalyticsRequest.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/analytics/request/AnalyticsRequest.java?rev=1576755&r1=1576754&r2=1576755&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/analytics/request/AnalyticsRequest.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/analytics/request/AnalyticsRequest.java Wed Mar 12 14:39:17 2014
@@ -38,11 +38,11 @@ public class AnalyticsRequest {
   
   public AnalyticsRequest(String name) {
     this.name = name;
-    expressions = new ArrayList<ExpressionRequest>();
-    hiddenExpressions = new HashSet<String>();
-    fieldFacets = new ArrayList<FieldFacetRequest>();
-    rangeFacets = new ArrayList<RangeFacetRequest>();
-    queryFacets = new ArrayList<QueryFacetRequest>();
+    expressions = new ArrayList<>();
+    hiddenExpressions = new HashSet<>();
+    fieldFacets = new ArrayList<>();
+    rangeFacets = new ArrayList<>();
+    queryFacets = new ArrayList<>();
   }
   
   public String getName() {