Posted to commits@lucene.apache.org by rm...@apache.org on 2014/03/12 19:14:27 UTC

svn commit: r1576837 [18/29] - in /lucene/dev/branches/branch_4x: ./ lucene/ lucene/analysis/ lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/ lucene/analysis/common/src/java/org/apache/lucene/analysis/compound/ lucene/analysis/co...

Modified: lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorUnit.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorUnit.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorUnit.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorUnit.java Wed Mar 12 18:14:05 2014
@@ -46,8 +46,8 @@ public class TestSolrEntityProcessorUnit
   }
 
   public void testMultiValuedFields() {
-    List<Doc> docs = new ArrayList<Doc>();
-    List<FldType> types = new ArrayList<FldType>();
+    List<Doc> docs = new ArrayList<>();
+    List<FldType> types = new ArrayList<>();
     types.add(new FldType(ID, ONE_ONE, new SVal('A', 'Z', 4, 4)));
     types.add(new FldType("description", new IRange(3, 3), new SVal('a', 'c', 1, 1)));
     Doc testDoc = createDoc(types);
@@ -66,12 +66,12 @@ public class TestSolrEntityProcessorUnit
   }
 
   private List<Doc> generateUniqueDocs(int numDocs) {
-    List<FldType> types = new ArrayList<FldType>();
+    List<FldType> types = new ArrayList<>();
     types.add(new FldType(ID, ONE_ONE, new SVal('A', 'Z', 4, 40)));
     types.add(new FldType("description", new IRange(1, 3), new SVal('a', 'c', 1, 1)));
 
-    Set<Comparable> previousIds = new HashSet<Comparable>();
-    List<Doc> docs = new ArrayList<Doc>(numDocs);
+    Set<Comparable> previousIds = new HashSet<>();
+    List<Doc> docs = new ArrayList<>(numDocs);
     for (int i = 0; i < numDocs; i++) {
       Doc doc = createDoc(types);
       while (previousIds.contains(doc.id)) {
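Every hunk in this commit makes the same mechanical change: explicit type arguments on generic instantiations are replaced with the Java 7 diamond operator, letting javac infer them from the declaration or constructor arguments. The standalone sketch below is illustrative only and not part of the patch (class name and values are made up); it shows the before/after pattern the hunks apply.

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class DiamondOperatorExample {
      public static void main(String[] args) {
        // Pre-Java 7 style: type arguments repeated on the right-hand side.
        List<String> before = new ArrayList<String>();
        Map<String, Object> beforeMap = new HashMap<String, Object>();

        // Java 7 diamond operator: the compiler infers the type arguments
        // from the variable declaration; this is the refactoring applied
        // throughout the Solr contrib and core sources in this commit.
        List<String> after = new ArrayList<>();
        Map<String, Object> afterMap = new HashMap<>();

        after.add("hello");
        afterMap.put("answer", 42);
        System.out.println(after + " " + afterMap + " " + before + " " + beforeMap);
      }
    }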

Modified: lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java Wed Mar 12 18:14:05 2014
@@ -79,11 +79,11 @@ public class TestSortedMapBackedCache ex
     DIHCache cache = null;
     try {
       cache = new SortedMapBackedCache();
-      Map<String, String> cacheProps = new HashMap<String, String>();
+      Map<String, String> cacheProps = new HashMap<>();
       cacheProps.put(DIHCacheSupport.CACHE_PRIMARY_KEY, "a_id");
       cache.open(getContext(cacheProps));
       
-      Map<String,Object> data = new HashMap<String,Object>();
+      Map<String,Object> data = new HashMap<>();
       data.put("a_id", null);
       data.put("bogus", "data");
       cache.add(data);
@@ -108,7 +108,7 @@ public class TestSortedMapBackedCache ex
   public void testCacheReopensWithUpdate() {
     DIHCache cache = null;
     try {      
-      Map<String, String> cacheProps = new HashMap<String, String>();
+      Map<String, String> cacheProps = new HashMap<>();
       cacheProps.put(DIHCacheSupport.CACHE_PRIMARY_KEY, "a_id");
       
       cache = new SortedMapBackedCache();
@@ -120,7 +120,7 @@ public class TestSortedMapBackedCache ex
       // Close the cache.
       cache.close();
 
-      List<ControlData> newControlData = new ArrayList<ControlData>();
+      List<ControlData> newControlData = new ArrayList<>();
       Object[] newIdEqualsThree = null;
       int j = 0;
       for (int i = 0; i < data.size(); i++) {

Modified: lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestURLDataSource.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestURLDataSource.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestURLDataSource.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestURLDataSource.java Wed Mar 12 18:14:05 2014
@@ -25,7 +25,7 @@ import java.util.Properties;
 import org.junit.Test;
 
 public class TestURLDataSource extends AbstractDataImportHandlerTestCase {
-  private List<Map<String, String>> fields = new ArrayList<Map<String, String>>();
+  private List<Map<String, String>> fields = new ArrayList<>();
   private URLDataSource dataSource = new URLDataSource();
   private VariableResolver variableResolver = new VariableResolver();
   private Context context = AbstractDataImportHandlerTestCase.getContext(null, variableResolver,

Modified: lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestVariableResolver.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestVariableResolver.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestVariableResolver.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestVariableResolver.java Wed Mar 12 18:14:05 2014
@@ -36,7 +36,7 @@ public class TestVariableResolver extend
   @Test
   public void testSimpleNamespace() {
     VariableResolver vri = new VariableResolver();
-    Map<String,Object> ns = new HashMap<String,Object>();
+    Map<String,Object> ns = new HashMap<>();
     ns.put("world", "WORLD");
     vri.addNamespace("hello", ns);
     assertEquals("WORLD", vri.resolve("hello.world"));
@@ -61,10 +61,10 @@ public class TestVariableResolver extend
   @Test
   public void testNestedNamespace() {
     VariableResolver vri = new VariableResolver();
-    Map<String,Object> ns = new HashMap<String,Object>();
+    Map<String,Object> ns = new HashMap<>();
     ns.put("world", "WORLD");
     vri.addNamespace("hello", ns);
-    ns = new HashMap<String,Object>();
+    ns = new HashMap<>();
     ns.put("world1", "WORLD1");
     vri.addNamespace("hello.my", ns);
     assertEquals("WORLD1", vri.resolve("hello.my.world1"));
@@ -73,10 +73,10 @@ public class TestVariableResolver extend
   @Test
   public void test3LevelNestedNamespace() {
     VariableResolver vri = new VariableResolver();
-    Map<String,Object> ns = new HashMap<String,Object>();
+    Map<String,Object> ns = new HashMap<>();
     ns.put("world", "WORLD");
     vri.addNamespace("hello", ns);
-    ns = new HashMap<String,Object>();
+    ns = new HashMap<>();
     ns.put("world1", "WORLD1");
     vri.addNamespace("hello.my.new", ns);
     assertEquals("WORLD1", vri.resolve("hello.my.new.world1"));
@@ -87,7 +87,7 @@ public class TestVariableResolver extend
     VariableResolver vri = new VariableResolver();
     vri.setEvaluators(new DataImporter().getEvaluators(Collections
         .<Map<String,String>> emptyList()));
-    Map<String,Object> ns = new HashMap<String,Object>();
+    Map<String,Object> ns = new HashMap<>();
     Date d = new Date();
     ns.put("dt", d);
     vri.addNamespace("A", ns);
@@ -115,7 +115,7 @@ public class TestVariableResolver extend
   @Test
   public void testDefaultNamespace() {
     VariableResolver vri = new VariableResolver();
-    Map<String,Object> ns = new HashMap<String,Object>();
+    Map<String,Object> ns = new HashMap<>();
     ns.put("world", "WORLD");
     vri.addNamespace(null, ns);
     assertEquals("WORLD", vri.resolve("world"));
@@ -124,7 +124,7 @@ public class TestVariableResolver extend
   @Test
   public void testDefaultNamespace1() {
     VariableResolver vri = new VariableResolver();
-    Map<String,Object> ns = new HashMap<String,Object>();
+    Map<String,Object> ns = new HashMap<>();
     ns.put("world", "WORLD");
     vri.addNamespace(null, ns);
     assertEquals("WORLD", vri.resolve("world"));
@@ -133,8 +133,8 @@ public class TestVariableResolver extend
   @Test
   public void testFunctionNamespace1() throws Exception {
     VariableResolver resolver = new VariableResolver();
-    final List<Map<String,String>> l = new ArrayList<Map<String,String>>();
-    Map<String,String> m = new HashMap<String,String>();
+    final List<Map<String,String>> l = new ArrayList<>();
+    Map<String,String> m = new HashMap<>();
     m.put("name", "test");
     m.put("class", E.class.getName());
     l.add(m);

Modified: lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java Wed Mar 12 18:14:05 2014
@@ -57,7 +57,7 @@ public class TestXPathEntityProcessor ex
             new VariableResolver(), getDataSource(cdData), Context.FULL_DUMP, fields, entityAttrs);
     XPathEntityProcessor xPathEntityProcessor = new XPathEntityProcessor();
     xPathEntityProcessor.init(c);
-    List<Map<String, Object>> result = new ArrayList<Map<String, Object>>();
+    List<Map<String, Object>> result = new ArrayList<>();
     while (true) {
       Map<String, Object> row = xPathEntityProcessor.nextRow();
       if (row == null)
@@ -80,7 +80,7 @@ public class TestXPathEntityProcessor ex
             new VariableResolver(), getDataSource(testXml), Context.FULL_DUMP, fields, entityAttrs);
     XPathEntityProcessor xPathEntityProcessor = new XPathEntityProcessor();
     xPathEntityProcessor.init(c);
-    List<Map<String, Object>> result = new ArrayList<Map<String, Object>>();
+    List<Map<String, Object>> result = new ArrayList<>();
     while (true) {
       Map<String, Object> row = xPathEntityProcessor.nextRow();
       if (row == null)
@@ -109,7 +109,7 @@ public class TestXPathEntityProcessor ex
             new VariableResolver(), getDataSource(textMultipleDocuments), Context.FULL_DUMP, fields, entityAttrs);
     XPathEntityProcessor xPathEntityProcessor = new XPathEntityProcessor();
     xPathEntityProcessor.init(c);
-    List<Map<String, Object>> result = new ArrayList<Map<String, Object>>();
+    List<Map<String, Object>> result = new ArrayList<>();
     while (true) {
       Map<String, Object> row = xPathEntityProcessor.nextRow();
       if (row == null)
@@ -276,7 +276,7 @@ public class TestXPathEntityProcessor ex
     xPathEntityProcessor.blockingQueueTimeOutUnits = TimeUnit.MICROSECONDS;
     
     xPathEntityProcessor.init(c);
-    List<Map<String, Object>> result = new ArrayList<Map<String, Object>>();
+    List<Map<String, Object>> result = new ArrayList<>();
     while (true) {
       if (rowsToRead >= 0 && result.size() >= rowsToRead) {
         Thread.currentThread().interrupt();
@@ -346,7 +346,7 @@ public class TestXPathEntityProcessor ex
             new VariableResolver(), getDataSource(cdData), Context.FULL_DUMP, null, entityAttrs);
     XPathEntityProcessor xPathEntityProcessor = new XPathEntityProcessor();
     xPathEntityProcessor.init(c);
-    List<Map<String, Object>> result = new ArrayList<Map<String, Object>>();
+    List<Map<String, Object>> result = new ArrayList<>();
     while (true) {
       Map<String, Object> row = xPathEntityProcessor.nextRow();
       if (row == null)

Modified: lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathRecordReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathRecordReader.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathRecordReader.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathRecordReader.java Wed Mar 12 18:14:05 2014
@@ -136,8 +136,8 @@ public class TestXPathRecordReader exten
     rr.addField("a", "/root/x/b/@a", false);
     rr.addField("b", "/root/x/b/@b", false);
 
-    final List<Map<String, Object>> a = new ArrayList<Map<String, Object>>();
-    final List<Map<String, Object>> x = new ArrayList<Map<String, Object>>();
+    final List<Map<String, Object>> a = new ArrayList<>();
+    final List<Map<String, Object>> x = new ArrayList<>();
     rr.streamRecords(new StringReader(xml), new XPathRecordReader.Handler() {
       @Override
       public void handle(Map<String, Object> record, String xpath) {

Modified: lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestZKPropertiesWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestZKPropertiesWriter.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestZKPropertiesWriter.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestZKPropertiesWriter.java Wed Mar 12 18:14:05 2014
@@ -106,11 +106,11 @@ public class TestZKPropertiesWriter exte
     SimpleDateFormat df = new SimpleDateFormat(dateFormat, Locale.ROOT);
     Date oneSecondAgo = new Date(System.currentTimeMillis() - 1000);
 
-    Map<String, String> init = new HashMap<String, String>();
+    Map<String, String> init = new HashMap<>();
     init.put("dateFormat", dateFormat);
     ZKPropertiesWriter spw = new ZKPropertiesWriter();
     spw.init(new DataImporter(h.getCore(), "dataimport"), init);
-    Map<String, Object> props = new HashMap<String, Object>();
+    Map<String, Object> props = new HashMap<>();
     props.put("SomeDates.last_index_time", oneSecondAgo);
     props.put("last_index_time", oneSecondAgo);
     spw.persist(props);

Modified: lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TripleThreatTransformer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TripleThreatTransformer.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TripleThreatTransformer.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TripleThreatTransformer.java Wed Mar 12 18:14:05 2014
@@ -36,17 +36,17 @@ import java.util.Map;
  */
 public class TripleThreatTransformer {
   public Object transformRow(Map<String, Object> row) {
-    List<Map<String, Object>> rows = new ArrayList<Map<String, Object>>(3);
+    List<Map<String, Object>> rows = new ArrayList<>(3);
     rows.add(row);
     rows.add(addDuplicateBackwardsValues(row));
-    rows.add(new LinkedHashMap<String,Object>(row));
+    rows.add(new LinkedHashMap<>(row));
     rows.get(2).put("AddAColumn_s", "Added");
     modifyIdColumn(rows.get(1), 1);
     modifyIdColumn(rows.get(2), 2);
     return rows;
   }
   private LinkedHashMap<String,Object> addDuplicateBackwardsValues(Map<String, Object> row) {
-    LinkedHashMap<String,Object> n = new LinkedHashMap<String,Object>();
+    LinkedHashMap<String,Object> n = new LinkedHashMap<>();
     for(Map.Entry<String,Object> entry : row.entrySet()) {
       String key = entry.getKey();
       if(!"id".equalsIgnoreCase(key)) {

Modified: lucene/dev/branches/branch_4x/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingRequestHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingRequestHandler.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingRequestHandler.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingRequestHandler.java Wed Mar 12 18:14:05 2014
@@ -82,7 +82,7 @@ public class ExtractingRequestHandler ex
       }
       NamedList configDateFormats = (NamedList) initArgs.get(DATE_FORMATS);
       if (configDateFormats != null && configDateFormats.size() > 0) {
-        dateFormats = new HashSet<String>();
+        dateFormats = new HashSet<>();
         Iterator<Map.Entry> it = configDateFormats.iterator();
         while (it.hasNext()) {
           String format = (String) it.next().getValue();

Modified: lucene/dev/branches/branch_4x/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/RegexRulesPasswordProvider.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/RegexRulesPasswordProvider.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/RegexRulesPasswordProvider.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/RegexRulesPasswordProvider.java Wed Mar 12 18:14:05 2014
@@ -41,7 +41,7 @@ import org.slf4j.LoggerFactory;
 public class RegexRulesPasswordProvider implements PasswordProvider {
   private static final Logger log = LoggerFactory.getLogger(RegexRulesPasswordProvider.class);
   
-  private LinkedHashMap<Pattern,String> passwordMap = new LinkedHashMap<Pattern,String>(); 
+  private LinkedHashMap<Pattern,String> passwordMap = new LinkedHashMap<>();
   private String explicitPassword; 
   
   @Override
@@ -72,7 +72,7 @@ public class RegexRulesPasswordProvider 
    * @param is input stream for the file
    */
   public static LinkedHashMap<Pattern,String> parseRulesFile(InputStream is) {
-    LinkedHashMap<Pattern,String> rules = new LinkedHashMap<Pattern,String>();
+    LinkedHashMap<Pattern,String> rules = new LinkedHashMap<>();
     BufferedReader br = new BufferedReader(IOUtils.getDecodingReader(is, IOUtils.CHARSET_UTF_8));
     String line;
     try {

Modified: lucene/dev/branches/branch_4x/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/SolrContentHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/SolrContentHandler.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/SolrContentHandler.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/SolrContentHandler.java Wed Mar 12 18:14:05 2014
@@ -57,7 +57,7 @@ public class SolrContentHandler extends 
   protected StringBuilder catchAllBuilder = new StringBuilder(2048);
   protected IndexSchema schema;
   protected Map<String, StringBuilder> fieldBuilders = Collections.emptyMap();
-  private LinkedList<StringBuilder> bldrStack = new LinkedList<StringBuilder>();
+  private LinkedList<StringBuilder> bldrStack = new LinkedList<>();
 
   protected boolean captureAttribs;
   protected boolean lowerNames;
@@ -89,7 +89,7 @@ public class SolrContentHandler extends 
     this.defaultField = params.get(DEFAULT_FIELD, "");
     String[] captureFields = params.getParams(CAPTURE_ELEMENTS);
     if (captureFields != null && captureFields.length > 0) {
-      fieldBuilders = new HashMap<String, StringBuilder>();
+      fieldBuilders = new HashMap<>();
       for (int i = 0; i < captureFields.length; i++) {
         fieldBuilders.put(captureFields[i], new StringBuilder());
       }
@@ -158,7 +158,7 @@ public class SolrContentHandler extends 
    */
   protected void addLiterals() {
     Iterator<String> paramNames = params.getParameterNamesIterator();
-    literalFieldNames = new HashSet<String>();
+    literalFieldNames = new HashSet<>();
     while (paramNames.hasNext()) {
       String pname = paramNames.next();
       if (!pname.startsWith(LITERALS_PREFIX)) continue;

Modified: lucene/dev/branches/branch_4x/solr/contrib/extraction/src/test/org/apache/solr/handler/extraction/ExtractingRequestHandlerTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/extraction/src/test/org/apache/solr/handler/extraction/ExtractingRequestHandlerTest.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/extraction/src/test/org/apache/solr/handler/extraction/ExtractingRequestHandlerTest.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/extraction/src/test/org/apache/solr/handler/extraction/ExtractingRequestHandlerTest.java Wed Mar 12 18:14:05 2014
@@ -616,7 +616,7 @@ public class ExtractingRequestHandlerTes
     try {
       // TODO: stop using locally defined streams once stream.file and
       // stream.body work everywhere
-      List<ContentStream> cs = new ArrayList<ContentStream>();
+      List<ContentStream> cs = new ArrayList<>();
       cs.add(new ContentStreamBase.FileStream(getFile(filename)));
       req.setContentStreams(cs);
       return h.queryAndResponse("/update/extract", req);

Modified: lucene/dev/branches/branch_4x/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessor.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessor.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessor.java Wed Mar 12 18:14:05 2014
@@ -53,7 +53,7 @@ public class LangDetectLanguageIdentifie
       Detector detector = DetectorFactory.create();
       detector.append(content);
       ArrayList<Language> langlist = detector.getProbabilities();
-      ArrayList<DetectedLanguage> solrLangList = new ArrayList<DetectedLanguage>();
+      ArrayList<DetectedLanguage> solrLangList = new ArrayList<>();
       for (Language l: langlist) {
         solrLangList.add(new DetectedLanguage(l.lang, l.prob));
       }

Modified: lucene/dev/branches/branch_4x/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessorFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessorFactory.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessorFactory.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessorFactory.java Wed Mar 12 18:14:05 2014
@@ -125,7 +125,7 @@ public class LangDetectLanguageIdentifie
       return;
     }
     loaded = true;
-    List<String> profileData = new ArrayList<String>();
+    List<String> profileData = new ArrayList<>();
     Charset encoding = Charset.forName("UTF-8");
     for (String language : languages) {
       InputStream stream = LangDetectLanguageIdentifierUpdateProcessor.class.getResourceAsStream("langdetect-profiles/" + language);

Modified: lucene/dev/branches/branch_4x/solr/contrib/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java Wed Mar 12 18:14:05 2014
@@ -107,7 +107,7 @@ public abstract class LanguageIdentifier
         fallbackFields = params.get(FALLBACK_FIELDS).split(",");
       }
       overwrite = params.getBool(OVERWRITE, false);
-      langWhitelist = new HashSet<String>();
+      langWhitelist = new HashSet<>();
       threshold = params.getDouble(THRESHOLD, DOCID_THRESHOLD_DEFAULT);
       if(params.get(LANG_WHITELIST, "").length() > 0) {
         for(String lang : params.get(LANG_WHITELIST, "").split(",")) {
@@ -133,15 +133,15 @@ public abstract class LanguageIdentifier
       } else {
         mapIndividualFields = mapFields;
       }
-      mapIndividualFieldsSet = new HashSet<String>(Arrays.asList(mapIndividualFields));
+      mapIndividualFieldsSet = new HashSet<>(Arrays.asList(mapIndividualFields));
       // Compile a union of the lists of fields to map
-      allMapFieldsSet = new HashSet<String>(Arrays.asList(mapFields));
+      allMapFieldsSet = new HashSet<>(Arrays.asList(mapFields));
       if(Arrays.equals(mapFields, mapIndividualFields)) {
         allMapFieldsSet.addAll(mapIndividualFieldsSet);
       }
 
       // Normalize detected langcode onto normalized langcode
-      lcMap = new HashMap<String,String>();
+      lcMap = new HashMap<>();
       if(params.get(LCMAP) != null) {
         for(String mapping : params.get(LCMAP).split("[, ]")) {
           String[] keyVal = mapping.split(":");
@@ -154,7 +154,7 @@ public abstract class LanguageIdentifier
       }
 
       // Language Code mapping
-      mapLcMap = new HashMap<String,String>();
+      mapLcMap = new HashMap<>();
       if(params.get(MAP_LCMAP) != null) {
         for(String mapping : params.get(MAP_LCMAP).split("[, ]")) {
           String[] keyVal = mapping.split(":");
@@ -199,7 +199,7 @@ public abstract class LanguageIdentifier
    */
   protected SolrInputDocument process(SolrInputDocument doc) {
     String docLang = null;
-    HashSet<String> docLangs = new HashSet<String>();
+    HashSet<String> docLangs = new HashSet<>();
     String fallbackLang = getFallbackLang(doc, fallbackFields, fallbackValue);
 
     if(langField == null || !doc.containsKey(langField) || (doc.containsKey(langField) && overwrite)) {
@@ -323,7 +323,7 @@ public abstract class LanguageIdentifier
    * @return a string of the chosen language
    */
   protected String resolveLanguage(String language, String fallbackLang) {
-    List<DetectedLanguage> l = new ArrayList<DetectedLanguage>();
+    List<DetectedLanguage> l = new ArrayList<>();
     l.add(new DetectedLanguage(language, 1.0));
     return resolveLanguage(l, fallbackLang);
   }

Modified: lucene/dev/branches/branch_4x/solr/contrib/langid/src/java/org/apache/solr/update/processor/TikaLanguageIdentifierUpdateProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/langid/src/java/org/apache/solr/update/processor/TikaLanguageIdentifierUpdateProcessor.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/langid/src/java/org/apache/solr/update/processor/TikaLanguageIdentifierUpdateProcessor.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/langid/src/java/org/apache/solr/update/processor/TikaLanguageIdentifierUpdateProcessor.java Wed Mar 12 18:14:05 2014
@@ -41,7 +41,7 @@ public class TikaLanguageIdentifierUpdat
   
   @Override
   protected List<DetectedLanguage> detectLanguage(String content) {
-    List<DetectedLanguage> languages = new ArrayList<DetectedLanguage>();
+    List<DetectedLanguage> languages = new ArrayList<>();
     if(content.trim().length() != 0) { 
       LanguageIdentifier identifier = new LanguageIdentifier(content);
       // FIXME: Hack - we get the distance from toString and calculate our own certainty score

Modified: lucene/dev/branches/branch_4x/solr/contrib/langid/src/test/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessorFactoryTestCase.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/langid/src/test/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessorFactoryTestCase.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/langid/src/test/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessorFactoryTestCase.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/langid/src/test/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessorFactoryTestCase.java Wed Mar 12 18:14:05 2014
@@ -124,7 +124,7 @@ public abstract class LanguageIdentifier
     assertEquals("zh", liProcessor.resolveLanguage("zh_cn", "NA"));
     assertEquals("zh", liProcessor.resolveLanguage("zh_tw", "NA"));
     assertEquals("no", liProcessor.resolveLanguage("no", "NA"));
-    List<DetectedLanguage> langs = new ArrayList<DetectedLanguage>();
+    List<DetectedLanguage> langs = new ArrayList<>();
     langs.add(new DetectedLanguage("zh_cn", 0.8));
     assertEquals("zh", liProcessor.resolveLanguage(langs, "NA"));
   }
@@ -246,7 +246,7 @@ public abstract class LanguageIdentifier
     liProcessor = createLangIdProcessor(parameters);
 
     // No detected languages
-    langs = new ArrayList<DetectedLanguage>();
+    langs = new ArrayList<>();
     assertEquals("", liProcessor.resolveLanguage(langs, null));
     assertEquals("fallback", liProcessor.resolveLanguage(langs, "fallback"));
 
@@ -255,7 +255,7 @@ public abstract class LanguageIdentifier
     assertEquals("one", liProcessor.resolveLanguage(langs, "fallback"));    
 
     // One detected language under default threshold
-    langs = new ArrayList<DetectedLanguage>();
+    langs = new ArrayList<>();
     langs.add(new DetectedLanguage("under", 0.1));
     assertEquals("fallback", liProcessor.resolveLanguage(langs, "fallback"));    
   }

Modified: lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/BatchWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/BatchWriter.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/BatchWriter.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/BatchWriter.java Wed Mar 12 18:14:05 2014
@@ -81,7 +81,7 @@ class BatchWriter {
     private UpdateResponse result;
 
     public Batch(Collection<SolrInputDocument> batch) {
-      documents = new ArrayList<SolrInputDocument>(batch);
+      documents = new ArrayList<>(batch);
     }
 
     public void run() {
@@ -111,7 +111,7 @@ class BatchWriter {
 
     protected void reset(List<SolrInputDocument> documents) {
       if (this.documents == null) {
-        this.documents = new ArrayList<SolrInputDocument>(documents);
+        this.documents = new ArrayList<>(documents);
       } else {
         this.documents.clear();
         this.documents.addAll(documents);
@@ -121,7 +121,7 @@ class BatchWriter {
 
     protected void reset(SolrInputDocument document) {
       if (this.documents == null) {
-        this.documents = new ArrayList<SolrInputDocument>();
+        this.documents = new ArrayList<>();
       } else {
         this.documents.clear();
       }

Modified: lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/GoLive.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/GoLive.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/GoLive.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/GoLive.java Wed Mar 12 18:14:05 2014
@@ -59,8 +59,8 @@ class GoLive {
         new LinkedBlockingQueue<Runnable>());
     
     try {
-      CompletionService<Request> completionService = new ExecutorCompletionService<Request>(executor);
-      Set<Future<Request>> pending = new HashSet<Future<Request>>();
+      CompletionService<Request> completionService = new ExecutorCompletionService<>(executor);
+      Set<Future<Request>> pending = new HashSet<>();
       int cnt = -1;
       for (final FileStatus dir : outDirs) {
         

Modified: lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/MapReduceIndexerTool.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/MapReduceIndexerTool.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/MapReduceIndexerTool.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/MapReduceIndexerTool.java Wed Mar 12 18:14:05 2014
@@ -541,7 +541,7 @@ public class MapReduceIndexerTool extend
 
   static List<List<String>> buildShardUrls(List<Object> urls, Integer numShards) {
     if (urls == null) return null;
-    List<List<String>> shardUrls = new ArrayList<List<String>>(urls.size());
+    List<List<String>> shardUrls = new ArrayList<>(urls.size());
     List<String> list = null;
     
     int sz;
@@ -551,7 +551,7 @@ public class MapReduceIndexerTool extend
     sz = (int) Math.ceil(urls.size() / (float)numShards);
     for (int i = 0; i < urls.size(); i++) {
       if (i % sz == 0) {
-        list = new ArrayList<String>();
+        list = new ArrayList<>();
         shardUrls.add(list);
       }
       list.add((String) urls.get(i));

Modified: lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/SolrOutputFormat.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/SolrOutputFormat.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/SolrOutputFormat.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/SolrOutputFormat.java Wed Mar 12 18:14:05 2014
@@ -158,7 +158,7 @@ public class SolrOutputFormat<K, V> exte
     Utils.getLogConfigFile(context.getConfiguration());
     Path workDir = getDefaultWorkFile(context, "");
     int batchSize = getBatchSize(context.getConfiguration());
-    return new SolrRecordWriter<K, V>(context, workDir, batchSize);
+    return new SolrRecordWriter<>(context, workDir, batchSize);
   }
 
   public static void setupSolrHomeCache(File solrHomeDir, Job job) throws IOException{
@@ -202,7 +202,7 @@ public class SolrOutputFormat<K, V> exte
   }
 
   private static void createZip(File dir, File out) throws IOException {
-    HashSet<File> files = new HashSet<File>();
+    HashSet<File> files = new HashSet<>();
     // take only conf/ and lib/
     for (String allowedDirectory : SolrRecordWriter
         .getAllowedConfigDirectories()) {

Modified: lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/SolrRecordWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/SolrRecordWriter.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/SolrRecordWriter.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/SolrRecordWriter.java Wed Mar 12 18:14:05 2014
@@ -53,10 +53,10 @@ class SolrRecordWriter<K, V> extends Rec
   
   private static final Logger LOG = LoggerFactory.getLogger(SolrRecordWriter.class);
 
-  public final static List<String> allowedConfigDirectories = new ArrayList<String>(
+  public final static List<String> allowedConfigDirectories = new ArrayList<>(
       Arrays.asList(new String[] { "conf", "lib", "solr.xml" }));
 
-  public final static Set<String> requiredConfigDirectories = new HashSet<String>();
+  public final static Set<String> requiredConfigDirectories = new HashSet<>();
   
   static {
     requiredConfigDirectories.add("conf");
@@ -101,7 +101,7 @@ class SolrRecordWriter<K, V> extends Rec
   private long numDocsWritten = 0;
   private long nextLogTime = System.nanoTime();
 
-  private static HashMap<TaskID, Reducer<?,?,?,?>.Context> contextMap = new HashMap<TaskID, Reducer<?,?,?,?>.Context>();
+  private static HashMap<TaskID, Reducer<?,?,?,?>.Context> contextMap = new HashMap<>();
   
   public SolrRecordWriter(TaskAttemptContext context, Path outputShardDir, int batchSize) {
     this.batchSize = batchSize;

Modified: lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/ZooKeeperInspector.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/ZooKeeperInspector.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/ZooKeeperInspector.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/ZooKeeperInspector.java Wed Mar 12 18:14:05 2014
@@ -55,14 +55,14 @@ final class ZooKeeperInspector {
 
     DocCollection docCollection = extractDocCollection(zkHost, collection);
     List<Slice> slices = getSortedSlices(docCollection.getSlices());
-    List<List<String>> solrUrls = new ArrayList<List<String>>(slices.size());
+    List<List<String>> solrUrls = new ArrayList<>(slices.size());
     for (Slice slice : slices) {
       if (slice.getLeader() == null) {
         throw new IllegalArgumentException("Cannot find SolrCloud slice leader. " +
             "It looks like not all of your shards are registered in ZooKeeper yet");
       }
       Collection<Replica> replicas = slice.getReplicas();
-      List<String> urls = new ArrayList<String>(replicas.size());
+      List<String> urls = new ArrayList<>(replicas.size());
       for (Replica replica : replicas) {
         ZkCoreNodeProps props = new ZkCoreNodeProps(replica);
         urls.add(props.getCoreUrl());

Modified: lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/LineRandomizerMapperReducerTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/LineRandomizerMapperReducerTest.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/LineRandomizerMapperReducerTest.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/LineRandomizerMapperReducerTest.java Wed Mar 12 18:14:05 2014
@@ -49,12 +49,12 @@ public class LineRandomizerMapperReducer
   @Test
   public void testMapReduce2Items() throws IOException {
     mapReduceDriver.withAll(Arrays.asList(
-        new Pair<LongWritable, Text>(new LongWritable(0), new Text("hello")),
-        new Pair<LongWritable, Text>(new LongWritable(1), new Text("world"))
+        new Pair<>(new LongWritable(0), new Text("hello")),
+        new Pair<>(new LongWritable(1), new Text("world"))
         ));
     mapReduceDriver.withAllOutput(Arrays.asList(
-        new Pair<Text, NullWritable>(new Text("world"), NullWritable.get()),
-        new Pair<Text, NullWritable>(new Text("hello"), NullWritable.get())
+        new Pair<>(new Text("world"), NullWritable.get()),
+        new Pair<>(new Text("hello"), NullWritable.get())
         ));
     mapReduceDriver.runTest();
   }
@@ -62,14 +62,14 @@ public class LineRandomizerMapperReducer
   @Test
   public void testMapReduce3Items() throws IOException {
     mapReduceDriver.withAll(Arrays.asList(
-        new Pair<LongWritable, Text>(new LongWritable(0), new Text("hello")),
-        new Pair<LongWritable, Text>(new LongWritable(1), new Text("world")),
-        new Pair<LongWritable, Text>(new LongWritable(2), new Text("nadja"))
+        new Pair<>(new LongWritable(0), new Text("hello")),
+        new Pair<>(new LongWritable(1), new Text("world")),
+        new Pair<>(new LongWritable(2), new Text("nadja"))
         ));
     mapReduceDriver.withAllOutput(Arrays.asList(
-        new Pair<Text, NullWritable>(new Text("nadja"), NullWritable.get()),
-        new Pair<Text, NullWritable>(new Text("world"), NullWritable.get()),
-        new Pair<Text, NullWritable>(new Text("hello"), NullWritable.get())
+        new Pair<>(new Text("nadja"), NullWritable.get()),
+        new Pair<>(new Text("world"), NullWritable.get()),
+        new Pair<>(new Text("hello"), NullWritable.get())
         ));
     mapReduceDriver.runTest();
   }
@@ -77,16 +77,16 @@ public class LineRandomizerMapperReducer
   @Test
   public void testMapReduce4Items() throws IOException {
     mapReduceDriver.withAll(Arrays.asList(
-        new Pair<LongWritable, Text>(new LongWritable(0), new Text("hello")),
-        new Pair<LongWritable, Text>(new LongWritable(1), new Text("world")),
-        new Pair<LongWritable, Text>(new LongWritable(2), new Text("nadja")),
-        new Pair<LongWritable, Text>(new LongWritable(3), new Text("basti"))
+        new Pair<>(new LongWritable(0), new Text("hello")),
+        new Pair<>(new LongWritable(1), new Text("world")),
+        new Pair<>(new LongWritable(2), new Text("nadja")),
+        new Pair<>(new LongWritable(3), new Text("basti"))
         ));
     mapReduceDriver.withAllOutput(Arrays.asList(
-        new Pair<Text, NullWritable>(new Text("nadja"), NullWritable.get()),
-        new Pair<Text, NullWritable>(new Text("world"), NullWritable.get()),
-        new Pair<Text, NullWritable>(new Text("basti"), NullWritable.get()),
-        new Pair<Text, NullWritable>(new Text("hello"), NullWritable.get())
+        new Pair<>(new Text("nadja"), NullWritable.get()),
+        new Pair<>(new Text("world"), NullWritable.get()),
+        new Pair<>(new Text("basti"), NullWritable.get()),
+        new Pair<>(new Text("hello"), NullWritable.get())
         ));
     mapReduceDriver.runTest();
   }
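One subtlety visible in the hunks above: the diamond also applies when the instantiation appears as a method argument, as in the new Pair<>(...) calls inside Arrays.asList(...), because javac infers the type arguments from the constructor's own arguments. A minimal sketch (the Pair class here is a hypothetical stand-in, not the Pair type imported by the test):

    import java.util.Arrays;
    import java.util.List;

    public class DiamondInArgumentPosition {
      // Hypothetical stand-in for the Pair type used in the test above.
      static class Pair<A, B> {
        final A first;
        final B second;
        Pair(A first, B second) { this.first = first; this.second = second; }
      }

      public static void main(String[] args) {
        // Diamond in argument position: <String, Integer> is inferred
        // from the constructor arguments of each Pair.
        List<Pair<String, Integer>> pairs = Arrays.asList(
            new Pair<>("hello", 0),
            new Pair<>("world", 1));
        System.out.println(pairs.size() + " " + pairs.get(0).first);
      }
    }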

Modified: lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineGoLiveMiniMRTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineGoLiveMiniMRTest.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineGoLiveMiniMRTest.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineGoLiveMiniMRTest.java Wed Mar 12 18:14:05 2014
@@ -245,7 +245,7 @@ public class MorphlineGoLiveMiniMRTest e
   public void testBuildShardUrls() throws Exception {
     // 2x3
     Integer numShards = 2;
-    List<Object> urls = new ArrayList<Object>();
+    List<Object> urls = new ArrayList<>();
     urls.add("shard1");
     urls.add("shard2");
     urls.add("shard3");
@@ -303,7 +303,7 @@ public class MorphlineGoLiveMiniMRTest e
     // null shards 3x1
     numShards = null;
     
-    urls = new ArrayList<Object>();
+    urls = new ArrayList<>();
     urls.add("shard1");
     urls.add("shard2");
     urls.add("shard3");
@@ -318,7 +318,7 @@ public class MorphlineGoLiveMiniMRTest e
     
     // 2x(2,3) off balance
     numShards = 2;
-    urls = new ArrayList<Object>();
+    urls = new ArrayList<>();
     urls.add("shard1");
     urls.add("shard2");
     urls.add("shard3");
@@ -328,7 +328,7 @@ public class MorphlineGoLiveMiniMRTest e
 
     assertEquals(shardUrls.toString(), 2, shardUrls.size());
     
-    Set<Integer> counts = new HashSet<Integer>();
+    Set<Integer> counts = new HashSet<>();
     counts.add(shardUrls.get(0).size());
     counts.add(shardUrls.get(1).size());
     
@@ -388,7 +388,7 @@ public class MorphlineGoLiveMiniMRTest e
         "--go-live"
     };
     args = prependInitialArgs(args);
-    List<String> argList = new ArrayList<String>();
+    List<String> argList = new ArrayList<>();
     getShardUrlArgs(argList);
     args = concat(args, argList.toArray(new String[0]));
     
@@ -418,7 +418,7 @@ public class MorphlineGoLiveMiniMRTest e
         "--go-live-threads", Integer.toString(random().nextInt(15) + 1)
     };
     args = prependInitialArgs(args);
-    argList = new ArrayList<String>();
+    argList = new ArrayList<>();
     getShardUrlArgs(argList);
     args = concat(args, argList.toArray(new String[0]));
     
@@ -608,7 +608,7 @@ public class MorphlineGoLiveMiniMRTest e
     };
     args = prependInitialArgs(args);
 
-    argList = new ArrayList<String>();
+    argList = new ArrayList<>();
     getShardUrlArgs(argList, replicatedCollection);
     args = concat(args, argList.toArray(new String[0]));
     

Modified: lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineReducerTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineReducerTest.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineReducerTest.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineReducerTest.java Wed Mar 12 18:14:05 2014
@@ -106,7 +106,7 @@ public class MorphlineReducerTest extend
       Configuration config = reduceDriver.getConfiguration();
       setupHadoopConfig(config);
       
-      List<SolrInputDocumentWritable> values = new ArrayList<SolrInputDocumentWritable>();
+      List<SolrInputDocumentWritable> values = new ArrayList<>();
       SolrInputDocument sid = new SolrInputDocument();
       String id = "myid1";
       sid.addField("id", id);

Modified: lucene/dev/branches/branch_4x/solr/contrib/morphlines-cell/src/java/org/apache/solr/morphlines/cell/SolrCellBuilder.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/morphlines-cell/src/java/org/apache/solr/morphlines/cell/SolrCellBuilder.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/morphlines-cell/src/java/org/apache/solr/morphlines/cell/SolrCellBuilder.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/morphlines-cell/src/java/org/apache/solr/morphlines/cell/SolrCellBuilder.java Wed Mar 12 18:14:05 2014
@@ -151,7 +151,7 @@ public final class SolrCellBuilder imple
         cellParams.put(ExtractingParams.XPATH_EXPRESSION, xpathExpr);
       }
       
-      this.dateFormats = getConfigs().getStringList(config, "dateFormats", new ArrayList<String>(DateUtil.DEFAULT_DATE_FORMATS));
+      this.dateFormats = getConfigs().getStringList(config, "dateFormats", new ArrayList<>(DateUtil.DEFAULT_DATE_FORMATS));
       
       String handlerStr = getConfigs().getString(config, "solrContentHandlerFactory", TrimSolrContentHandlerFactory.class.getName());
       Class<? extends SolrContentHandlerFactory> factoryClass;
@@ -165,7 +165,7 @@ public final class SolrCellBuilder imple
 
       this.locale = getLocale(getConfigs().getString(config, "locale", ""));
       
-      this.mediaTypeToParserMap = new HashMap<MediaType, Parser>();
+      this.mediaTypeToParserMap = new HashMap<>();
       //MimeTypes mimeTypes = MimeTypes.getDefaultMimeTypes(); // FIXME getMediaTypeRegistry.normalize() 
 
       List<? extends Config> parserConfigs = getConfigs().getConfigList(config, "parsers");

Modified: lucene/dev/branches/branch_4x/solr/contrib/morphlines-cell/src/test/org/apache/solr/morphlines/cell/SolrCellMorphlineTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/morphlines-cell/src/test/org/apache/solr/morphlines/cell/SolrCellMorphlineTest.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/morphlines-cell/src/test/org/apache/solr/morphlines/cell/SolrCellMorphlineTest.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/morphlines-cell/src/test/org/apache/solr/morphlines/cell/SolrCellMorphlineTest.java Wed Mar 12 18:14:05 2014
@@ -37,8 +37,8 @@ import org.junit.Test;
 
 public class SolrCellMorphlineTest extends AbstractSolrMorphlineTestBase {
 
-  private Map<String,Integer> expectedRecords = new HashMap<String,Integer>();
-  private Map<String, Map<String, Object>> expectedRecordContents = new HashMap<String, Map<String, Object>>();
+  private Map<String,Integer> expectedRecords = new HashMap<>();
+  private Map<String, Map<String, Object>> expectedRecordContents = new HashMap<>();
   @BeforeClass
   public static void beforeClass2() {
     assumeFalse("FIXME: Morphlines currently has issues with Windows paths", Constants.WINDOWS);

Modified: lucene/dev/branches/branch_4x/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/CollectingDocumentLoader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/CollectingDocumentLoader.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/CollectingDocumentLoader.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/CollectingDocumentLoader.java Wed Mar 12 18:14:05 2014
@@ -31,8 +31,8 @@ import org.slf4j.LoggerFactory;
 class CollectingDocumentLoader implements DocumentLoader {
 
   private final int batchSize;
-  private final List<SolrInputDocument> batch = new ArrayList<SolrInputDocument> ();
-  private List<SolrInputDocument> results = new ArrayList<SolrInputDocument> ();
+  private final List<SolrInputDocument> batch = new ArrayList<> ();
+  private List<SolrInputDocument> results = new ArrayList<> ();
 
   private static final Logger LOGGER = LoggerFactory.getLogger(CollectingDocumentLoader.class);
 

Modified: lucene/dev/branches/branch_4x/solr/contrib/uima/src/java/org/apache/solr/uima/processor/SolrUIMAConfigurationReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/uima/src/java/org/apache/solr/uima/processor/SolrUIMAConfigurationReader.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/uima/src/java/org/apache/solr/uima/processor/SolrUIMAConfigurationReader.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/uima/src/java/org/apache/solr/uima/processor/SolrUIMAConfigurationReader.java Wed Mar 12 18:14:05 2014
@@ -65,7 +65,7 @@ public class SolrUIMAConfigurationReader
 
   @SuppressWarnings("rawtypes")
   private Map<String, Map<String, MapField>> readTypesFeaturesFieldsMapping() {
-    Map<String, Map<String, MapField>> map = new HashMap<String, Map<String, MapField>>();
+    Map<String, Map<String, MapField>> map = new HashMap<>();
 
     NamedList fieldMappings = (NamedList) args.get("fieldMappings");
     /* iterate over UIMA types */
@@ -73,7 +73,7 @@ public class SolrUIMAConfigurationReader
       NamedList type = (NamedList) fieldMappings.get("type", i);
       String typeName = (String)type.get("name");
 
-      Map<String, MapField> subMap = new HashMap<String, MapField>();
+      Map<String, MapField> subMap = new HashMap<>();
       /* iterate over mapping definitions */
       for(int j = 0; j < type.size() - 1; j++){
         NamedList mapping = (NamedList) type.get("mapping", j + 1);
@@ -96,7 +96,7 @@ public class SolrUIMAConfigurationReader
 
   @SuppressWarnings("rawtypes")
   private Map<String, Object> readAEOverridingParameters() {
-    Map<String, Object> runtimeParameters = new HashMap<String, Object>();
+    Map<String, Object> runtimeParameters = new HashMap<>();
     NamedList runtimeParams = (NamedList) args.get("runtimeParameters");
     for (int i = 0; i < runtimeParams.size(); i++) {
       String name = runtimeParams.getName(i);

Modified: lucene/dev/branches/branch_4x/solr/contrib/uima/src/test/org/apache/solr/uima/processor/UIMAUpdateRequestProcessorTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/uima/src/test/org/apache/solr/uima/processor/UIMAUpdateRequestProcessorTest.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/uima/src/test/org/apache/solr/uima/processor/UIMAUpdateRequestProcessorTest.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/uima/src/test/org/apache/solr/uima/processor/UIMAUpdateRequestProcessorTest.java Wed Mar 12 18:14:05 2014
@@ -192,7 +192,7 @@ public class UIMAUpdateRequestProcessorT
   }
 
   private void addDoc(String chain, String doc) throws Exception {
-    Map<String, String[]> params = new HashMap<String, String[]>();
+    Map<String, String[]> params = new HashMap<>();
     params.put(UpdateParams.UPDATE_CHAIN, new String[] { chain });
     MultiMapSolrParams mmparams = new MultiMapSolrParams(params);
     SolrQueryRequestBase req = new SolrQueryRequestBase(h.getCore(), (SolrParams) mmparams) {
@@ -200,7 +200,7 @@ public class UIMAUpdateRequestProcessorT
 
     UpdateRequestHandler handler = new UpdateRequestHandler();
     handler.init(null);
-    ArrayList<ContentStream> streams = new ArrayList<ContentStream>(2);
+    ArrayList<ContentStream> streams = new ArrayList<>(2);
     streams.add(new ContentStreamBase.StringStream(doc));
     req.setContentStreams(streams);
     handler.handleRequestBody(req, new SolrQueryResponse());

Modified: lucene/dev/branches/branch_4x/solr/contrib/velocity/src/java/org/apache/solr/response/SolrParamResourceLoader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/contrib/velocity/src/java/org/apache/solr/response/SolrParamResourceLoader.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/contrib/velocity/src/java/org/apache/solr/response/SolrParamResourceLoader.java (original)
+++ lucene/dev/branches/branch_4x/solr/contrib/velocity/src/java/org/apache/solr/response/SolrParamResourceLoader.java Wed Mar 12 18:14:05 2014
@@ -31,7 +31,7 @@ import java.util.Iterator;
 import java.util.Map;
 
 public class SolrParamResourceLoader extends ResourceLoader {
-  private Map<String,String> templates = new HashMap<String,String>();
+  private Map<String,String> templates = new HashMap<>();
   public SolrParamResourceLoader(SolrQueryRequest request) {
     super();
 

Modified: lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/SolrLogFormatter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/SolrLogFormatter.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/SolrLogFormatter.java (original)
+++ lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/SolrLogFormatter.java Wed Mar 12 18:14:05 2014
@@ -47,7 +47,7 @@ public class SolrLogFormatter extends Fo
 
   long startTime = System.currentTimeMillis();
   long lastTime = startTime;
-  Map<Method, String> methodAlias = new HashMap<Method, String>();
+  Map<Method, String> methodAlias = new HashMap<>();
   
   public static class Method {
     public String className;
@@ -106,9 +106,9 @@ public class SolrLogFormatter extends Fo
     Map<String, Object> coreProps;
   }
 
-  Map<SolrCore, CoreInfo> coreInfoMap = new WeakHashMap<SolrCore, CoreInfo>();    // TODO: use something that survives across a core reload?
+  Map<SolrCore, CoreInfo> coreInfoMap = new WeakHashMap<>();    // TODO: use something that survives across a core reload?
 
-  public Map<String,String> classAliases = new HashMap<String, String>();
+  public Map<String,String> classAliases = new HashMap<>();
 
   @Override
   public String format(LogRecord record) {
@@ -379,7 +379,7 @@ sb.append("(group_name=").append(tg.getN
 
 
 
-  static ThreadLocal<String> threadLocal = new ThreadLocal<String>();
+  static ThreadLocal<String> threadLocal = new ThreadLocal<>();
   
   public static void main(String[] args) throws Exception {
 

Modified: lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/analysis/LegacyHTMLStripCharFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/analysis/LegacyHTMLStripCharFilter.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/analysis/LegacyHTMLStripCharFilter.java (original)
+++ lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/analysis/LegacyHTMLStripCharFilter.java Wed Mar 12 18:14:05 2014
@@ -775,7 +775,7 @@ public class LegacyHTMLStripCharFilter e
 
   private static final HashMap<String,Character> entityTable;
   static {
-    entityTable = new HashMap<String,Character>();
+    entityTable = new HashMap<>();
     // entityName and entityVal generated from the python script
     // included in comments at the end of this file.
     final String[] entityName={ "zwnj","aring","gt","yen","ograve","Chi","delta","rang","sup","trade","Ntilde","xi","upsih","nbsp","Atilde","radic","otimes","aelig","oelig","equiv","ni","infin","Psi","auml","cup","Epsilon","otilde","lt","Icirc","Eacute","Lambda","sbquo","Prime","prime","psi","Kappa","rsaquo","Tau","uacute","ocirc","lrm","zwj","cedil","Alpha","not","amp","AElig","oslash","acute","lceil","alefsym","laquo","shy","loz","ge","Igrave","nu","Ograve","lsaquo","sube","euro","rarr","sdot","rdquo","Yacute","lfloor","lArr","Auml","Dagger","brvbar","Otilde","szlig","clubs","diams","agrave","Ocirc","Iota","Theta","Pi","zeta","Scaron","frac14","egrave","sub","iexcl","frac12","ordf","sum","prop","Uuml","ntilde","atilde","asymp","uml","prod","nsub","reg","rArr","Oslash","emsp","THORN","yuml","aacute","Mu","hArr","le","thinsp","dArr","ecirc","bdquo","Sigma","Aring","tilde","nabla","mdash","uarr","times","Ugrave","Eta","Agrave","chi","real","circ","eth","rceil","iuml","gamma","lambda"
 ,"harr","Egrave","frac34","dagger","divide","Ouml","image","ndash","hellip","igrave","Yuml","ang","alpha","frasl","ETH","lowast","Nu","plusmn","bull","sup1","sup2","sup3","Aacute","cent","oline","Beta","perp","Delta","there4","pi","iota","empty","euml","notin","iacute","para","epsilon","weierp","OElig","uuml","larr","icirc","Upsilon","omicron","upsilon","copy","Iuml","Oacute","Xi","kappa","ccedil","Ucirc","cap","mu","scaron","lsquo","isin","Zeta","minus","deg","and","tau","pound","curren","int","ucirc","rfloor","ensp","crarr","ugrave","exist","cong","theta","oplus","permil","Acirc","piv","Euml","Phi","Iacute","quot","Uacute","Omicron","ne","iquest","eta","rsquo","yacute","Rho","darr","Ecirc","Omega","acirc","sim","phi","sigmaf","macr","thetasym","Ccedil","ordm","uArr","forall","beta","fnof","rho","micro","eacute","omega","middot","Gamma","rlm","lang","spades","supe","thorn","ouml","or","raquo","part","sect","ldquo","hearts","sigma","oacute"};

Modified: lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java (original)
+++ lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java Wed Mar 12 18:14:05 2014
@@ -90,7 +90,7 @@ public class JettySolrRunner {
   private String coreNodeName;
 
   /** Maps servlet holders (i.e. factories: class + init params) to path specs */
-  private SortedMap<ServletHolder,String> extraServlets = new TreeMap<ServletHolder,String>();
+  private SortedMap<ServletHolder,String> extraServlets = new TreeMap<>();
   private SortedMap<Class,String> extraRequestFilters;
   private LinkedList<FilterHolder> extraFilters;
 
@@ -106,7 +106,7 @@ public class JettySolrRunner {
     }
 
     // TODO: keep track of certain number of last requests
-    private LinkedList<HttpServletRequest> requests = new LinkedList<HttpServletRequest>();
+    private LinkedList<HttpServletRequest> requests = new LinkedList<>();
 
 
     @Override
@@ -188,7 +188,7 @@ public class JettySolrRunner {
       SortedMap<Class,String> extraRequestFilters) {
     if (null != extraServlets) { this.extraServlets.putAll(extraServlets); }
     if (null != extraRequestFilters) {
-      this.extraRequestFilters = new TreeMap<Class,String>(extraRequestFilters.comparator());
+      this.extraRequestFilters = new TreeMap<>(extraRequestFilters.comparator());
       this.extraRequestFilters.putAll(extraRequestFilters);
     }
     this.solrConfigFilename = solrConfigFilename;
@@ -316,7 +316,7 @@ public class JettySolrRunner {
 //        FilterHolder fh = new FilterHolder(filter);
         debugFilter = root.addFilter(DebugFilter.class, "*", EnumSet.of(DispatcherType.REQUEST) );
         if (extraRequestFilters != null) {
-          extraFilters = new LinkedList<FilterHolder>();
+          extraFilters = new LinkedList<>();
           for (Class filterClass : extraRequestFilters.keySet()) {
             extraFilters.add(root.addFilter(filterClass, extraRequestFilters.get(filterClass),
               EnumSet.of(DispatcherType.REQUEST)));

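[Editor's note] Inference is unaffected by constructor arguments, so the same rewrite applies to sized, comparator-based, and copy constructors, as in the TreeMap and LinkedList hunks above. A short sketch with illustrative names (not taken from the commit):

  import java.util.LinkedList;
  import java.util.SortedMap;
  import java.util.TreeMap;

  class DiamondWithArgs {
    // Copy an existing map's ordering: the comparator argument does not block inference.
    SortedMap<String, Integer> copyOrdering(SortedMap<String, Integer> source) {
      SortedMap<String, Integer> copy = new TreeMap<>(source.comparator());
      copy.putAll(source);
      return copy;
    }

    // An initially empty collection works the same way.
    LinkedList<String> recent = new LinkedList<>();
  }
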
Modified: lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/cloud/Assign.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/cloud/Assign.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/cloud/Assign.java (original)
+++ lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/cloud/Assign.java Wed Mar 12 18:14:05 2014
@@ -86,7 +86,7 @@ public class Assign {
       return "shard1";
     }
 
-    List<String> shardIdNames = new ArrayList<String>(sliceMap.keySet());
+    List<String> shardIdNames = new ArrayList<>(sliceMap.keySet());
 
     if (shardIdNames.size() < numShards) {
       return "shard" + (shardIdNames.size() + 1);
@@ -95,7 +95,7 @@ public class Assign {
     // TODO: don't need to sort to find shard with fewest replicas!
 
     // else figure out which shard needs more replicas
-    final Map<String, Integer> map = new HashMap<String, Integer>();
+    final Map<String, Integer> map = new HashMap<>();
     for (String shardId : shardIdNames) {
       int cnt = sliceMap.get(shardId).getReplicasMap().size();
       map.put(shardId, cnt);
@@ -135,12 +135,12 @@ public class Assign {
 
     Set<String> nodes = clusterState.getLiveNodes();
 
-    List<String> nodeList = new ArrayList<String>(nodes.size());
+    List<String> nodeList = new ArrayList<>(nodes.size());
     nodeList.addAll(nodes);
     if (createNodeList != null) nodeList.retainAll(createNodeList);
 
 
-    HashMap<String,Node> nodeNameVsShardCount =  new HashMap<String, Node>();
+    HashMap<String,Node> nodeNameVsShardCount =  new HashMap<>();
     for (String s : nodeList) nodeNameVsShardCount.put(s,new Node(s));
     for (String s : clusterState.getCollections()) {
       DocCollection c = clusterState.getCollection(s);

Modified: lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java (original)
+++ lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java Wed Mar 12 18:14:05 2014
@@ -82,7 +82,7 @@ public class DistributedQueue {
    */
   private TreeMap<Long,String> orderedChildren(Watcher watcher)
       throws KeeperException, InterruptedException {
-    TreeMap<Long,String> orderedChildren = new TreeMap<Long,String>();
+    TreeMap<Long,String> orderedChildren = new TreeMap<>();
     
     List<String> childNames = null;
     try {

Modified: lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/cloud/LeaderElector.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/cloud/LeaderElector.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/cloud/LeaderElector.java (original)
+++ lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/cloud/LeaderElector.java Wed Mar 12 18:14:05 2014
@@ -212,7 +212,7 @@ public  class LeaderElector {
    * @return int seqs
    */
   private List<Integer> getSeqs(List<String> seqs) {
-    List<Integer> intSeqs = new ArrayList<Integer>(seqs.size());
+    List<Integer> intSeqs = new ArrayList<>(seqs.size());
     for (String seq : seqs) {
       intSeqs.add(getSeq(seq));
     }

Modified: lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/cloud/Overseer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/cloud/Overseer.java?rev=1576837&r1=1576836&r2=1576837&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/cloud/Overseer.java (original)
+++ lucene/dev/branches/branch_4x/solr/core/src/java/org/apache/solr/cloud/Overseer.java Wed Mar 12 18:14:05 2014
@@ -337,7 +337,7 @@ public class Overseer {
         return clusterState;
       }
 
-      ArrayList<String> shardNames = new ArrayList<String>();
+      ArrayList<String> shardNames = new ArrayList<>();
 
       if(ImplicitDocRouter.NAME.equals( message.getStr("router.name",DocRouter.DEFAULT_NAME))){
         getShardNames(shardNames,message.getStr("shards",DocRouter.DEFAULT_NAME));
@@ -392,10 +392,10 @@ public class Overseer {
 
       Map<String, RoutingRule> routingRules = slice.getRoutingRules();
       if (routingRules == null)
-        routingRules = new HashMap<String, RoutingRule>();
+        routingRules = new HashMap<>();
       RoutingRule r = routingRules.get(routeKey);
       if (r == null) {
-        Map<String, Object> map = new HashMap<String, Object>();
+        Map<String, Object> map = new HashMap<>();
         map.put("routeRanges", range);
         map.put("targetCollection", targetCollection);
         map.put("expireAt", expireAt);
@@ -463,7 +463,7 @@ public class Overseer {
       Slice slice = clusterState.getSlice(collection, shardId);
       if (slice == null)  {
         Map<String, Replica> replicas = Collections.EMPTY_MAP;
-        Map<String, Object> sliceProps = new HashMap<String, Object>();
+        Map<String, Object> sliceProps = new HashMap<>();
         String shardRange = message.getStr(ZkStateReader.SHARD_RANGE_PROP);
         String shardState = message.getStr(ZkStateReader.SHARD_STATE_PROP);
         String shardParent = message.getStr(ZkStateReader.SHARD_PARENT_PROP);
@@ -530,7 +530,7 @@ public class Overseer {
         Integer numShards = message.getInt(ZkStateReader.NUM_SHARDS_PROP, null);
         log.info("Update state numShards={} message={}", numShards, message);
 
-        List<String> shardNames  = new ArrayList<String>();
+        List<String> shardNames  = new ArrayList<>();
 
         //collection does not yet exist, create placeholders if num shards is specified
         boolean collectionExists = clusterState.hasCollection(collection);
@@ -574,7 +574,7 @@ public class Overseer {
 
         Slice slice = clusterState.getSlice(collection, sliceName);
         
-        Map<String,Object> replicaProps = new LinkedHashMap<String,Object>();
+        Map<String,Object> replicaProps = new LinkedHashMap<>();
 
         replicaProps.putAll(message.getProperties());
         // System.out.println("########## UPDATE MESSAGE: " + JSONUtil.toJSON(message));
@@ -594,7 +594,7 @@ public class Overseer {
           
           // remove any props with null values
           Set<Entry<String,Object>> entrySet = replicaProps.entrySet();
-          List<String> removeKeys = new ArrayList<String>();
+          List<String> removeKeys = new ArrayList<>();
           for (Entry<String,Object> entry : entrySet) {
             if (entry.getValue() == null) {
               removeKeys.add(entry.getKey());
@@ -624,8 +624,8 @@ public class Overseer {
             sliceProps = slice.getProperties();
             replicas = slice.getReplicasCopy();
           } else {
-            replicas = new HashMap<String, Replica>(1);
-            sliceProps = new HashMap<String, Object>();
+            replicas = new HashMap<>(1);
+            sliceProps = new HashMap<>();
             sliceProps.put(Slice.RANGE, shardRange);
             sliceProps.put(Slice.STATE, shardState);
             sliceProps.put(Slice.PARENT, shardParent);
@@ -661,8 +661,8 @@ public class Overseer {
           if (allActive)  {
             log.info("Shard: {} - all replicas are active. Finding status of fellow sub-shards", sliceName);
             // find out about other sub shards
-            Map<String, Slice> allSlicesCopy = new HashMap<String, Slice>(state.getSlicesMap(collection));
-            List<Slice> subShardSlices = new ArrayList<Slice>();
+            Map<String, Slice> allSlicesCopy = new HashMap<>(state.getSlicesMap(collection));
+            List<Slice> subShardSlices = new ArrayList<>();
             outer:
             for (Entry<String, Slice> entry : allSlicesCopy.entrySet()) {
               if (sliceName.equals(entry.getKey()))
@@ -688,7 +688,7 @@ public class Overseer {
               log.info("Shard: {} - All replicas across all fellow sub-shards are now ACTIVE. Preparing to switch shard states.", sliceName);
               String parentSliceName = (String) sliceProps.remove(Slice.PARENT);
 
-              Map<String, Object> propMap = new HashMap<String, Object>();
+              Map<String, Object> propMap = new HashMap<>();
               propMap.put(Overseer.QUEUE_OPERATION, "updateshardstate");
               propMap.put(parentSliceName, Slice.INACTIVE);
               propMap.put(sliceName, Slice.ACTIVE);
@@ -717,7 +717,7 @@ public class Overseer {
 //        Map<String, DocCollection> newCollections = new LinkedHashMap<String,DocCollection>();
 
 
-        Map<String, Slice> newSlices = new LinkedHashMap<String,Slice>();
+        Map<String, Slice> newSlices = new LinkedHashMap<>();
 //        newCollections.putAll(state.getCollectionStates());
         for (int i = 0; i < shards.size(); i++) {
           String sliceName = shards.get(i);
@@ -725,14 +725,14 @@ public class Overseer {
         for (int i = 0; i < numShards; i++) {
           final String sliceName = "shard" + (i+1);*/
 
-          Map<String, Object> sliceProps = new LinkedHashMap<String, Object>(1);
+          Map<String, Object> sliceProps = new LinkedHashMap<>(1);
           sliceProps.put(Slice.RANGE, ranges == null? null: ranges.get(i));
 
           newSlices.put(sliceName, new Slice(sliceName, null, sliceProps));
         }
 
         // TODO: fill in with collection properties read from the /collections/<collectionName> node
-        Map<String,Object> collectionProps = new HashMap<String,Object>();
+        Map<String,Object> collectionProps = new HashMap<>();
 
         for (Entry<String, Object> e : OverseerCollectionProcessor.COLL_PROPS.entrySet()) {
           Object val = message.get(e.getKey());
@@ -791,7 +791,7 @@ public class Overseer {
       private ClusterState updateSlice(ClusterState state, String collectionName, Slice slice) {
         // System.out.println("###!!!### OLD CLUSTERSTATE: " + JSONUtil.toJSON(state.getCollectionStates()));
         // System.out.println("Updating slice:" + slice);
-        Map<String, DocCollection> newCollections = new LinkedHashMap<String,DocCollection>(state.getCollectionStates());  // make a shallow copy
+        Map<String, DocCollection> newCollections = new LinkedHashMap<>(state.getCollectionStates());  // make a shallow copy
         DocCollection coll = newCollections.get(collectionName);
         Map<String,Slice> slices;
         Map<String,Object> props;
@@ -800,14 +800,14 @@ public class Overseer {
         if (coll == null) {
           //  when updateSlice is called on a collection that doesn't exist, it's currently when a core is publishing itself
           // without explicitly creating a collection.  In this current case, we assume custom sharding with an "implicit" router.
-          slices = new HashMap<String, Slice>(1);
-          props = new HashMap<String,Object>(1);
+          slices = new HashMap<>(1);
+          props = new HashMap<>(1);
           props.put(DocCollection.DOC_ROUTER, ZkNodeProps.makeMap("name",ImplicitDocRouter.NAME));
           router = new ImplicitDocRouter();
         } else {
           props = coll.getProperties();
           router = coll.getRouter();
-          slices = new LinkedHashMap<String, Slice>(coll.getSlicesMap()); // make a shallow copy
+          slices = new LinkedHashMap<>(coll.getSlicesMap()); // make a shallow copy
         }
         slices.put(slice.getName(), slice);
         DocCollection newCollection = new DocCollection(collectionName, slices, props, router);
@@ -820,7 +820,7 @@ public class Overseer {
       
       private ClusterState setShardLeader(ClusterState state, String collectionName, String sliceName, String leaderUrl) {
 
-        final Map<String, DocCollection> newCollections = new LinkedHashMap<String,DocCollection>(state.getCollectionStates());
+        final Map<String, DocCollection> newCollections = new LinkedHashMap<>(state.getCollectionStates());
         DocCollection coll = newCollections.get(collectionName);
         if(coll == null) {
           log.error("Could not mark shard leader for non existing collection:" + collectionName);
@@ -829,7 +829,7 @@ public class Overseer {
 
         Map<String, Slice> slices = coll.getSlicesMap();
         // make a shallow copy and add it to the new collection
-        slices = new LinkedHashMap<String,Slice>(slices);
+        slices = new LinkedHashMap<>(slices);
 
         Slice slice = slices.get(sliceName);
         if (slice == null) {
@@ -844,7 +844,7 @@ public class Overseer {
 
           Replica oldLeader = slice.getLeader();
 
-          final Map<String,Replica> newReplicas = new LinkedHashMap<String,Replica>();
+          final Map<String,Replica> newReplicas = new LinkedHashMap<>();
 
           for (Replica replica : slice.getReplicas()) {
 
@@ -852,11 +852,11 @@ public class Overseer {
             String coreURL = ZkCoreNodeProps.getCoreUrl(replica.getStr(ZkStateReader.BASE_URL_PROP), replica.getStr(ZkStateReader.CORE_NAME_PROP));
 
             if (replica == oldLeader && !coreURL.equals(leaderUrl)) {
-              Map<String,Object> replicaProps = new LinkedHashMap<String,Object>(replica.getProperties());
+              Map<String,Object> replicaProps = new LinkedHashMap<>(replica.getProperties());
               replicaProps.remove(Slice.LEADER);
               replica = new Replica(replica.getName(), replicaProps);
             } else if (coreURL.equals(leaderUrl)) {
-              Map<String,Object> replicaProps = new LinkedHashMap<String,Object>(replica.getProperties());
+              Map<String,Object> replicaProps = new LinkedHashMap<>(replica.getProperties());
               replicaProps.put(Slice.LEADER, "true");  // TODO: allow booleans instead of strings
               replica = new Replica(replica.getName(), replicaProps);
             }
@@ -901,7 +901,7 @@ public class Overseer {
 
       DocCollection coll = clusterState.getCollection(collection);
 
-      Map<String, Slice> newSlices = new LinkedHashMap<String, Slice>(coll.getSlicesMap());
+      Map<String, Slice> newSlices = new LinkedHashMap<>(coll.getSlicesMap());
       newSlices.remove(sliceId);
 
       DocCollection newCollection = new DocCollection(coll.getName(), newSlices, coll.getProperties(), coll.getRouter());
@@ -916,7 +916,7 @@ public class Overseer {
         final String collection = message.getStr(ZkStateReader.COLLECTION_PROP);
         if (!checkCollectionKeyExistence(message)) return clusterState;
 
-//        final Map<String, DocCollection> newCollections = new LinkedHashMap<String,DocCollection>(clusterState.getCollectionStates()); // shallow copy
+//        final Map<String, DocCollection> newCollections = new LinkedHashMap<>(clusterState.getCollectionStates()); // shallow copy
 //        DocCollection coll = newCollections.get(collection);
         DocCollection coll = clusterState.getCollectionOrNull(collection) ;
         if (coll == null) {
@@ -933,7 +933,7 @@ public class Overseer {
           return clusterState;
         }
 
-        Map<String, Slice> newSlices = new LinkedHashMap<String, Slice>();
+        Map<String, Slice> newSlices = new LinkedHashMap<>();
         boolean lastSlice = false;
         for (Slice slice : coll.getSlices()) {
           Replica replica = slice.getReplica(cnn);