Posted to commits@opennlp.apache.org by jo...@apache.org on 2016/12/20 10:14:19 UTC

[3/3] opennlp git commit: OPENNLP-871: Cleanup code base for release

OPENNLP-871: Cleanup code base for release

This closes #11


Project: http://git-wip-us.apache.org/repos/asf/opennlp/repo
Commit: http://git-wip-us.apache.org/repos/asf/opennlp/commit/afc6b65b
Tree: http://git-wip-us.apache.org/repos/asf/opennlp/tree/afc6b65b
Diff: http://git-wip-us.apache.org/repos/asf/opennlp/diff/afc6b65b

Branch: refs/heads/trunk
Commit: afc6b65b74a4d774cf122fd73733527b181d7d85
Parents: 664d803
Author: smarthi <sm...@apache.org>
Authored: Tue Dec 20 01:47:04 2016 -0500
Committer: Joern Kottmann <jo...@apache.org>
Committed: Tue Dec 20 11:09:13 2016 +0100

----------------------------------------------------------------------
 .../builder/MorfologikDictionayBuilder.java     |  4 +-
 .../builder/XMLDictionaryToTableTool.java       |  8 ++--
 .../lemmatizer/MorfologikLemmatizer.java        | 25 +++++++------
 .../tagdict/MorfologikPOSTaggerFactory.java     | 18 +++------
 .../tagdict/MorfologikTagDictionary.java        |  2 +-
 .../java/opennlp/tools/chunker/ChunkSample.java | 14 +++----
 .../chunker/ChunkSampleSequenceStream.java      |  2 +-
 .../java/opennlp/tools/chunker/Chunker.java     |  8 ++--
 .../tools/chunker/ChunkerContextGenerator.java  |  2 +-
 .../tools/chunker/ChunkerCrossValidator.java    |  6 +--
 .../tools/chunker/ChunkerEventStream.java       |  2 +-
 .../opennlp/tools/chunker/ChunkerFactory.java   |  4 +-
 .../java/opennlp/tools/chunker/ChunkerME.java   |  6 +--
 .../opennlp/tools/cmdline/ArgumentParser.java   | 30 +++++++--------
 .../main/java/opennlp/tools/cmdline/CLI.java    |  4 +-
 .../tools/cmdline/DetailedFMeasureListener.java |  8 ++--
 .../tools/cmdline/EvaluationErrorPrinter.java   | 14 +++----
 .../tools/cmdline/GenerateManualTool.java       |  2 +-
 .../java/opennlp/tools/cmdline/ModelLoader.java |  3 +-
 .../tools/cmdline/StreamFactoryRegistry.java    |  9 ++---
 .../chunker/ChunkerCrossValidatorTool.java      |  2 +-
 .../cmdline/chunker/ChunkerEvaluatorTool.java   |  2 +-
 .../tools/cmdline/chunker/ChunkerMETool.java    |  2 +-
 .../doccat/DoccatFineGrainedReportListener.java | 38 +++++++++----------
 .../tools/cmdline/doccat/DoccatTool.java        |  2 +-
 .../tools/cmdline/doccat/DoccatTrainerTool.java |  3 +-
 .../cmdline/entitylinker/EntityLinkerTool.java  |  2 +-
 .../LemmatizerFineGrainedReportListener.java    | 36 +++++++++---------
 .../cmdline/lemmatizer/LemmatizerMETool.java    |  2 +-
 .../cmdline/namefind/TokenNameFinderTool.java   |  2 +-
 .../tools/cmdline/postag/POSModelLoader.java    |  5 +--
 .../POSTaggerFineGrainedReportListener.java     | 39 ++++++++++----------
 .../tools/cmdline/postag/POSTaggerTool.java     |  2 +-
 .../cmdline/postag/POSTaggerTrainerTool.java    | 26 +++++++------
 .../cmdline/tokenizer/CommandLineTokenizer.java |  4 +-
 .../opennlp/tools/dictionary/Dictionary.java    | 11 ++----
 .../java/opennlp/tools/dictionary/Index.java    |  2 +-
 .../tools/dictionary/serializer/Attributes.java |  2 +-
 .../serializer/DictionarySerializer.java        | 14 +++----
 .../doccat/BagOfWordsFeatureGenerator.java      |  2 +-
 .../tools/doccat/DoccatCrossValidator.java      |  2 +-
 .../opennlp/tools/doccat/DoccatFactory.java     |  3 +-
 .../java/opennlp/tools/doccat/DoccatModel.java  |  6 +--
 .../DocumentCategorizerContextGenerator.java    |  2 +-
 .../tools/doccat/DocumentCategorizerME.java     |  9 ++---
 .../opennlp/tools/doccat/DocumentSample.java    |  2 +-
 .../tools/doccat/NGramFeatureGenerator.java     |  2 +-
 .../opennlp/tools/entitylinker/BaseLink.java    | 10 +----
 .../opennlp/tools/entitylinker/LinkedSpan.java  |  8 +---
 .../formats/BioNLP2004NameSampleStream.java     |  6 +--
 .../tools/formats/Conll02NameSampleStream.java  |  8 ++--
 .../tools/formats/Conll03NameSampleStream.java  | 11 +++---
 .../tools/formats/ConllXPOSSampleStream.java    |  4 +-
 .../tools/formats/ad/ADChunkSampleStream.java   |  6 +--
 .../tools/formats/ad/ADNameSampleStream.java    | 19 ++++------
 .../tools/formats/ad/ADPOSSampleStream.java     |  8 ++--
 .../formats/ad/ADPOSSampleStreamFactory.java    |  4 +-
 .../formats/ad/ADSentenceSampleStream.java      |  2 +-
 .../ad/ADSentenceSampleStreamFactory.java       |  5 +--
 .../tools/formats/ad/ADSentenceStream.java      | 13 ++-----
 .../formats/brat/AnnotationConfiguration.java   |  7 +---
 .../formats/brat/BratAnnotationStream.java      |  3 +-
 .../tools/formats/brat/BratDocumentStream.java  |  4 +-
 .../formats/brat/BratNameSampleStream.java      |  8 ++--
 .../convert/AbstractToSentenceSampleStream.java |  2 +-
 .../formats/convert/ParseToPOSSampleStream.java |  4 +-
 .../DefaultLemmatizerContextGenerator.java      |  6 +--
 .../tools/lemmatizer/DictionaryLemmatizer.java  |  6 +--
 .../opennlp/tools/lemmatizer/LemmaSample.java   | 12 +++---
 .../lemmatizer/LemmaSampleEventStream.java      |  2 +-
 .../lemmatizer/LemmaSampleSequenceStream.java   |  2 +-
 .../tools/lemmatizer/LemmaSampleStream.java     | 10 ++---
 .../opennlp/tools/lemmatizer/Lemmatizer.java    |  2 +-
 .../lemmatizer/LemmatizerContextGenerator.java  |  2 +-
 .../tools/lemmatizer/LemmatizerFactory.java     |  4 +-
 .../opennlp/tools/lemmatizer/LemmatizerME.java  |  6 +--
 .../java/opennlp/tools/ml/TrainerFactory.java   |  8 ++--
 .../opennlp/tools/ml/maxent/DataStream.java     |  4 +-
 .../main/java/opennlp/tools/ml/maxent/GIS.java  |  6 +--
 .../tools/ml/maxent/RealBasicEventStream.java   |  2 +-
 .../ml/maxent/io/BinaryGISModelWriter.java      |  2 +-
 .../tools/ml/maxent/io/GISModelWriter.java      |  9 ++---
 .../ml/maxent/io/OldFormatGISModelReader.java   |  6 +--
 .../ml/maxent/io/PlainTextGISModelWriter.java   |  6 +--
 .../io/SuffixSensitiveGISModelReader.java       |  3 +-
 .../quasinewton/ParallelNegLogLikelihood.java   |  4 +-
 .../opennlp/tools/ml/model/MaxentModel.java     | 18 ++++-----
 .../tools/ml/model/OnePassDataIndexer.java      | 14 +++----
 .../main/java/opennlp/tools/ml/model/Prior.java |  6 +--
 .../opennlp/tools/ml/model/SequenceStream.java  |  2 +-
 .../tools/ml/model/TwoPassDataIndexer.java      | 15 ++++----
 .../tools/ml/naivebayes/LogProbabilities.java   |  2 +-
 .../tools/ml/naivebayes/NaiveBayesModel.java    |  8 ++--
 .../ml/naivebayes/NaiveBayesModelWriter.java    |  6 +--
 .../tools/ml/naivebayes/NaiveBayesTrainer.java  |  2 +-
 .../PlainTextNaiveBayesModelWriter.java         |  6 +--
 .../tools/ml/naivebayes/Probabilities.java      |  6 +--
 .../java/opennlp/tools/namefind/BilouCodec.java |  5 +--
 .../java/opennlp/tools/namefind/BioCodec.java   |  7 ++--
 .../tools/namefind/DictionaryNameFinder.java    |  2 +-
 .../tools/namefind/DocumentNameFinder.java      |  2 +-
 .../tools/namefind/NameContextGenerator.java    |  6 +--
 .../tools/namefind/NameFinderEventStream.java   |  7 ++--
 .../opennlp/tools/namefind/NameFinderME.java    | 13 +++----
 .../java/opennlp/tools/namefind/NameSample.java |  8 ++--
 .../namefind/NameSampleSequenceStream.java      |  6 +--
 .../tools/namefind/NameSampleTypeFilter.java    |  6 +--
 .../tools/namefind/RegexNameFinderFactory.java  |  6 +--
 .../opennlp/tools/namefind/TokenNameFinder.java |  4 +-
 .../namefind/TokenNameFinderCrossValidator.java |  6 +--
 .../namefind/TokenNameFinderEvaluator.java      |  8 +---
 .../tools/namefind/TokenNameFinderModel.java    | 19 ++++------
 .../java/opennlp/tools/ngram/NGramModel.java    |  2 +-
 .../tools/parser/AbstractBottomUpParser.java    | 10 ++---
 .../tools/parser/AbstractParserEventStream.java | 14 +++----
 .../opennlp/tools/parser/ChunkSampleStream.java |  8 ++--
 .../java/opennlp/tools/parser/GapLabeler.java   |  2 +-
 .../java/opennlp/tools/parser/HeadRules.java    |  4 +-
 .../main/java/opennlp/tools/parser/Parse.java   | 17 +++++----
 .../main/java/opennlp/tools/parser/Parser.java  |  4 +-
 .../tools/parser/ParserChunkerFactory.java      |  2 +-
 .../parser/ParserChunkerSequenceValidator.java  |  3 +-
 .../tools/parser/ParserCrossValidator.java      |  6 +--
 .../opennlp/tools/parser/ParserEvaluator.java   |  4 +-
 .../java/opennlp/tools/parser/ParserModel.java  | 12 +++---
 .../parser/chunking/BuildContextGenerator.java  |  2 +-
 .../parser/chunking/CheckContextGenerator.java  |  2 +-
 .../opennlp/tools/parser/chunking/Parser.java   | 22 +++++------
 .../parser/chunking/ParserEventStream.java      | 39 ++++++++++----------
 .../opennlp/tools/parser/lang/en/HeadRules.java |  9 ++---
 .../parser/lang/es/AncoraSpanishHeadRules.java  |  9 ++---
 .../postag/DefaultPOSSequenceValidator.java     |  6 +--
 .../tools/postag/MutableTagDictionary.java      |  4 +-
 .../tools/postag/POSContextGenerator.java       |  2 +-
 .../java/opennlp/tools/postag/POSTagger.java    | 10 ++---
 .../tools/postag/POSTaggerCrossValidator.java   |  2 +-
 .../opennlp/tools/postag/TagDictionary.java     |  2 +-
 .../tools/sentdetect/EndOfSentenceScanner.java  |  8 ++--
 .../tools/sentdetect/SDContextGenerator.java    |  2 +-
 .../tools/sentdetect/SentenceDetector.java      |  4 +-
 .../opennlp/tools/stemmer/PorterStemmer.java    | 35 ++++++++----------
 .../java/opennlp/tools/stemmer/Stemmer.java     |  2 +-
 .../tools/tokenize/TokenContextGenerator.java   |  2 +-
 .../java/opennlp/tools/tokenize/Tokenizer.java  |  4 +-
 .../tools/util/BeamSearchContextGenerator.java  |  2 +-
 .../src/main/java/opennlp/tools/util/Heap.java  | 16 ++++----
 .../opennlp/tools/util/TrainingParameters.java  |  6 +--
 .../opennlp/tools/util/model/BaseModel.java     | 18 ++++-----
 148 files changed, 494 insertions(+), 621 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/builder/MorfologikDictionayBuilder.java
----------------------------------------------------------------------
diff --git a/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/builder/MorfologikDictionayBuilder.java b/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/builder/MorfologikDictionayBuilder.java
index dbbca4d..54cb95c 100644
--- a/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/builder/MorfologikDictionayBuilder.java
+++ b/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/builder/MorfologikDictionayBuilder.java
@@ -17,12 +17,10 @@
 
 package opennlp.morfologik.builder;
 
-import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.nio.charset.Charset;
 import java.nio.file.Path;
 import java.util.Properties;
-
 import morfologik.stemming.DictionaryMetadata;
 import morfologik.stemming.EncoderType;
 import morfologik.tools.DictCompile;
@@ -90,7 +88,7 @@ public class MorfologikDictionayBuilder {
   }
 
   Properties createProperties(Charset encoding, String separator,
-      EncoderType encoderType) throws FileNotFoundException, IOException {
+      EncoderType encoderType) throws IOException {
 
     Properties properties = new Properties();
     properties.setProperty("fsa.dict.separator", separator);
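
Side note on the narrowed throws clause above: java.io.FileNotFoundException is a subclass of java.io.IOException, so declaring only IOException loses nothing. A minimal standalone sketch (hypothetical class, not part of this commit) showing that callers still catch the more specific exception:

  import java.io.FileNotFoundException;
  import java.io.IOException;

  public class ThrowsNarrowingSketch {

    // Declaring only IOException still allows throwing the more specific
    // FileNotFoundException, because it extends IOException.
    static void open(String path) throws IOException {
      throw new FileNotFoundException("Could not read: " + path);
    }

    public static void main(String[] args) {
      try {
        open("missing.dict");
      } catch (IOException e) {
        // Also catches FileNotFoundException.
        System.err.println(e.getMessage());
      }
    }
  }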

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/cmdline/builder/XMLDictionaryToTableTool.java
----------------------------------------------------------------------
diff --git a/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/cmdline/builder/XMLDictionaryToTableTool.java b/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/cmdline/builder/XMLDictionaryToTableTool.java
index 0e7f2d5..ef6668e 100644
--- a/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/cmdline/builder/XMLDictionaryToTableTool.java
+++ b/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/cmdline/builder/XMLDictionaryToTableTool.java
@@ -59,7 +59,7 @@ public class XMLDictionaryToTableTool extends BasicCmdLineTool {
     CmdLineUtil.checkInputFile("dictionary input file", dictInFile);
     CmdLineUtil.checkOutputFile("dictionary output file", dictOutFile);
 
-    POSDictionary tagDictionary = null;
+    POSDictionary tagDictionary;
     try {
       tagDictionary = POSDictionary.create(new FileInputStream(dictInFile));
     } catch (IOException e) {
@@ -116,12 +116,10 @@ public class XMLDictionaryToTableTool extends BasicCmdLineTool {
   }
 
   private String createEntry(String word, String tag) {
-    
-    String entry = "" + SEPARATOR +// base
+
+    return "" + SEPARATOR +// base
         word + SEPARATOR +
         tag;
-        
-    return entry;
   }
 
 }

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/lemmatizer/MorfologikLemmatizer.java
----------------------------------------------------------------------
diff --git a/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/lemmatizer/MorfologikLemmatizer.java b/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/lemmatizer/MorfologikLemmatizer.java
index 2090ce5..2798e42 100644
--- a/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/lemmatizer/MorfologikLemmatizer.java
+++ b/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/lemmatizer/MorfologikLemmatizer.java
@@ -24,6 +24,8 @@ import java.util.Arrays;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
+import java.util.Objects;
 import java.util.Set;
 
 import morfologik.stemming.Dictionary;
@@ -35,19 +37,18 @@ import opennlp.tools.lemmatizer.DictionaryLemmatizer;
 public class MorfologikLemmatizer implements DictionaryLemmatizer {
 
   private IStemmer dictLookup;
-  public final Set<String> constantTags = new HashSet<String>(Arrays.asList(
-      "NNP", "NP00000"));
+  public final Set<String> constantTags = new HashSet<>(Arrays.asList("NNP", "NP00000"));
 
   public MorfologikLemmatizer(Path dictionaryPath) throws IllegalArgumentException,
       IOException {
     dictLookup = new DictionaryLookup(Dictionary.read(dictionaryPath));
   }
 
-  private HashMap<List<String>, String> getLemmaTagsDict(String word) {
+  private Map<List<String>, String> getLemmaTagsDict(String word) {
     List<WordData> wdList = dictLookup.lookup(word);
-    HashMap<List<String>, String> dictMap = new HashMap<List<String>, String>();
+    Map<List<String>, String> dictMap = new HashMap<>();
     for (WordData wd : wdList) {
-      List<String> wordLemmaTags = new ArrayList<String>();
+      List<String> wordLemmaTags = new ArrayList<>();
       wordLemmaTags.add(word);
       wordLemmaTags.add(wd.getTag().toString());
       dictMap.put(wordLemmaTags, wd.getStem().toString());
@@ -56,7 +57,7 @@ public class MorfologikLemmatizer implements DictionaryLemmatizer {
   }
 
   private List<String> getDictKeys(String word, String postag) {
-    List<String> keys = new ArrayList<String>();
+    List<String> keys = new ArrayList<>();
     if (constantTags.contains(postag)) {
       keys.addAll(Arrays.asList(word, postag));
     } else {
@@ -65,8 +66,8 @@ public class MorfologikLemmatizer implements DictionaryLemmatizer {
     return keys;
   }
 
-  private HashMap<List<String>, String> getDictMap(String word, String postag) {
-    HashMap<List<String>, String> dictMap = new HashMap<List<String>, String>();
+  private Map<List<String>, String> getDictMap(String word, String postag) {
+    Map<List<String>, String> dictMap;
 
     if (constantTags.contains(postag)) {
       dictMap = this.getLemmaTagsDict(word);
@@ -77,16 +78,16 @@ public class MorfologikLemmatizer implements DictionaryLemmatizer {
   }
 
   public String lemmatize(String word, String postag) {
-    String lemma = null;
+    String lemma;
     List<String> keys = this.getDictKeys(word, postag);
-    HashMap<List<String>, String> dictMap = this.getDictMap(word, postag);
+    Map<List<String>, String> dictMap = this.getDictMap(word, postag);
     // lookup lemma as value of the map
     String keyValue = dictMap.get(keys);
     if (keyValue != null) {
       lemma = keyValue;
-    } else if (keyValue == null && constantTags.contains(postag)) {
+    } else if (constantTags.contains(postag)) {
       lemma = word;
-    } else if (keyValue == null && word.toUpperCase() == word) {
+    } else if (Objects.equals(word.toUpperCase(), word)) {
       lemma = word;
     } else {
       lemma = word.toLowerCase();
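
The == removed in the last hunk compares String references, not contents, while Objects.equals compares the characters and tolerates null. A minimal sketch of the difference (hypothetical values, not code from this commit):

  import java.util.Objects;

  public class StringEqualitySketch {

    public static void main(String[] args) {
      String a = "NNP";
      String b = new String("NNP");

      // Reference identity: prints false, the two objects are distinct instances.
      System.out.println(a == b);

      // Content equality, null-safe: prints true, the characters match.
      System.out.println(Objects.equals(a, b));
    }
  }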

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/tagdict/MorfologikPOSTaggerFactory.java
----------------------------------------------------------------------
diff --git a/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/tagdict/MorfologikPOSTaggerFactory.java b/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/tagdict/MorfologikPOSTaggerFactory.java
index 93d6c61..9a6abc9 100644
--- a/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/tagdict/MorfologikPOSTaggerFactory.java
+++ b/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/tagdict/MorfologikPOSTaggerFactory.java
@@ -26,12 +26,10 @@ import java.io.OutputStream;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.Map;
-
 import morfologik.stemming.DictionaryMetadata;
 import opennlp.tools.dictionary.Dictionary;
 import opennlp.tools.postag.POSTaggerFactory;
 import opennlp.tools.postag.TagDictionary;
-import opennlp.tools.util.InvalidFormatException;
 import opennlp.tools.util.model.ArtifactSerializer;
 import opennlp.tools.util.model.ModelUtil;
 
@@ -53,8 +51,7 @@ public class MorfologikPOSTaggerFactory extends POSTaggerFactory {
   public MorfologikPOSTaggerFactory() {
   }
   
-  public TagDictionary createTagDictionary(File dictionary)
-      throws InvalidFormatException, FileNotFoundException, IOException {
+  public TagDictionary createTagDictionary(File dictionary) throws IOException {
     
     if(!dictionary.canRead()) {
       throw new FileNotFoundException("Could not read dictionary: " + dictionary.getAbsolutePath());
@@ -87,9 +84,9 @@ public class MorfologikPOSTaggerFactory extends POSTaggerFactory {
       if (artifactProvider != null) {
         Object obj = artifactProvider.getArtifact(MORFOLOGIK_POSDICT);
         if (obj != null) {
-          byte[] data = (byte[]) artifactProvider
+          byte[] data = artifactProvider
               .getArtifact(MORFOLOGIK_POSDICT);
-          byte[] info = (byte[]) artifactProvider
+          byte[] info = artifactProvider
               .getArtifact(MORFOLOGIK_DICT_INFO);
 
           try {
@@ -121,7 +118,7 @@ public class MorfologikPOSTaggerFactory extends POSTaggerFactory {
 
   @Override
   public TagDictionary createTagDictionary(InputStream in)
-      throws InvalidFormatException, IOException {
+      throws IOException {
     throw new UnsupportedOperationException(
         "Morfologik POS Tagger factory does not support this operation");
   }
@@ -129,8 +126,7 @@ public class MorfologikPOSTaggerFactory extends POSTaggerFactory {
   @Override
   @SuppressWarnings("rawtypes")
   public Map<String, ArtifactSerializer> createArtifactSerializersMap() {
-    Map<String, ArtifactSerializer> serializers = super
-        .createArtifactSerializersMap();
+    Map<String, ArtifactSerializer> serializers = super.createArtifactSerializersMap();
 
     serializers.put(MORFOLOGIK_POSDICT_SUF, new ByteArraySerializer());
     serializers.put(MORFOLOGIK_DICT_INFO_SUF, new ByteArraySerializer());
@@ -156,9 +152,7 @@ public class MorfologikPOSTaggerFactory extends POSTaggerFactory {
 
   static class ByteArraySerializer implements ArtifactSerializer<byte[]> {
 
-    public byte[] create(InputStream in) throws IOException,
-        InvalidFormatException {
-
+    public byte[] create(InputStream in) throws IOException {
       return ModelUtil.read(in);
     }
 

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/tagdict/MorfologikTagDictionary.java
----------------------------------------------------------------------
diff --git a/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/tagdict/MorfologikTagDictionary.java b/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/tagdict/MorfologikTagDictionary.java
index b34ca2b..a57b4bb 100644
--- a/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/tagdict/MorfologikTagDictionary.java
+++ b/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/tagdict/MorfologikTagDictionary.java
@@ -77,7 +77,7 @@ public class MorfologikTagDictionary implements TagDictionary {
 
     List<WordData> data = dictLookup.lookup(word);
     if (data != null && data.size() > 0) {
-      List<String> tags = new ArrayList<String>(data.size());
+      List<String> tags = new ArrayList<>(data.size());
       for (int i = 0; i < data.size(); i++) {
         tags.add(data.get(i).getTag().toString());
       }

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkSample.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkSample.java b/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkSample.java
index da71d36..c61b051 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkSample.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkSample.java
@@ -47,9 +47,9 @@ public class ChunkSample {
 
     validateArguments(sentence.length, tags.length, preds.length);
 
-    this.sentence = Collections.unmodifiableList(new ArrayList<String>(Arrays.asList(sentence)));
-    this.tags = Collections.unmodifiableList(new ArrayList<String>(Arrays.asList(tags)));
-    this.preds = Collections.unmodifiableList(new ArrayList<String>(Arrays.asList(preds)));
+    this.sentence = Collections.unmodifiableList(new ArrayList<>(Arrays.asList(sentence)));
+    this.tags = Collections.unmodifiableList(new ArrayList<>(Arrays.asList(tags)));
+    this.preds = Collections.unmodifiableList(new ArrayList<>(Arrays.asList(preds)));
   }
 
   /**
@@ -66,9 +66,9 @@ public class ChunkSample {
 
     validateArguments(sentence.size(), tags.size(), preds.size());
 
-    this.sentence = Collections.unmodifiableList(new ArrayList<String>(sentence));
-    this.tags = Collections.unmodifiableList(new ArrayList<String>(tags));
-    this.preds = Collections.unmodifiableList(new ArrayList<String>(preds));
+    this.sentence = Collections.unmodifiableList(new ArrayList<>(sentence));
+    this.tags = Collections.unmodifiableList(new ArrayList<>(tags));
+    this.preds = Collections.unmodifiableList(new ArrayList<>(preds));
   }
 
   /** Gets the training sentence */
@@ -109,7 +109,7 @@ public class ChunkSample {
     validateArguments(aSentence.length, aTags.length, aPreds.length);
 
     // initialize with the list maximum size
-    List<Span> phrases = new ArrayList<Span>(aSentence.length);
+    List<Span> phrases = new ArrayList<>(aSentence.length);
     String startTag = "";
     int startIndex = 0;
     boolean foundPhrase = false;
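
The new ArrayList<>(...) form used throughout these hunks is the Java 7 diamond operator; the element type is inferred by the compiler, so behaviour is unchanged from spelling out new ArrayList<String>(...). A small sketch (hypothetical values, not from this commit):

  import java.util.ArrayList;
  import java.util.Arrays;
  import java.util.Collections;
  import java.util.List;

  public class DiamondSketch {

    public static void main(String[] args) {
      // Pre-Java 7 style: the element type is repeated on the right-hand side.
      List<String> tags = Collections.unmodifiableList(
          new ArrayList<String>(Arrays.asList("B-NP", "I-NP", "O")));

      // Diamond operator: the compiler infers <String> from the context.
      List<String> sameTags = Collections.unmodifiableList(
          new ArrayList<>(Arrays.asList("B-NP", "I-NP", "O")));

      System.out.println(tags.equals(sameTags)); // prints true
    }
  }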

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkSampleSequenceStream.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkSampleSequenceStream.java b/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkSampleSequenceStream.java
index e4da42f..8935829 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkSampleSequenceStream.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkSampleSequenceStream.java
@@ -53,7 +53,7 @@ public class ChunkSampleSequenceStream implements SequenceStream {
 
         events[i] = new Event(tags[i], context);
       }
-      return new Sequence<ChunkSample>(events,sample);
+      return new Sequence<>(events,sample);
     }
 
     return null;

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/chunker/Chunker.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/chunker/Chunker.java b/opennlp-tools/src/main/java/opennlp/tools/chunker/Chunker.java
index b26f01b..c496d12 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/chunker/Chunker.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/chunker/Chunker.java
@@ -33,7 +33,7 @@ public interface Chunker {
    *
    * @return an array of chunk tags for each token in the sequence.
    */
-  public String[] chunk(String[] toks, String tags[]);
+  String[] chunk(String[] toks, String tags[]);
 
   /**
    * Generates tagged chunk spans for the given sequence returning the result in a span array.
@@ -43,7 +43,7 @@ public interface Chunker {
    *
    * @return an array of spans with chunk tags for each chunk in the sequence.
    */
-  public Span[] chunkAsSpans(String[] toks, String tags[]);
+  Span[] chunkAsSpans(String[] toks, String tags[]);
 
   /**
    * Returns the top k chunk sequences for the specified sentence with the specified pos-tags
@@ -52,7 +52,7 @@ public interface Chunker {
    *
    * @return the top k chunk sequences for the specified sentence.
    */
-  public Sequence[] topKSequences(String[] sentence, String[] tags);
+  Sequence[] topKSequences(String[] sentence, String[] tags);
 
   /**
    * Returns the top k chunk sequences for the specified sentence with the specified pos-tags
@@ -62,5 +62,5 @@ public interface Chunker {
    *
    * @return the top k chunk sequences for the specified sentence.
    */
-  public Sequence[] topKSequences(String[] sentence, String[] tags, double minSequenceScore);
+  Sequence[] topKSequences(String[] sentence, String[] tags, double minSequenceScore);
 }
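
Dropping public from the interface methods above is purely cosmetic: interface methods are implicitly public abstract, and interface fields are implicitly public static final. A small sketch (hypothetical interface, not part of OpenNLP):

  public interface GreeterSketch {

    // Implicitly public static final, even without the modifiers.
    String DEFAULT_NAME = "world";

    // Implicitly public abstract.
    String greet(String name);
  }

  class ConsoleGreeter implements GreeterSketch {

    // The implementing class must still declare the method public.
    @Override
    public String greet(String name) {
      return "Hello, " + name;
    }

    public static void main(String[] args) {
      System.out.println(new ConsoleGreeter().greet(GreeterSketch.DEFAULT_NAME));
    }
  }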

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkerContextGenerator.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkerContextGenerator.java b/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkerContextGenerator.java
index 8b1e745..25e531a 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkerContextGenerator.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkerContextGenerator.java
@@ -32,5 +32,5 @@ public interface ChunkerContextGenerator extends BeamSearchContextGenerator<Stri
    * @param preds The previous decisions made in the taging of this sequence.  Only indices less than i will be examined.
    * @return An array of predictive contexts on which a model basis its decisions.
    */
-  public String[] getContext(int i, String[] toks, String[] tags, String[] preds);
+  String[] getContext(int i, String[] toks, String[] tags, String[] preds);
 }

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkerCrossValidator.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkerCrossValidator.java b/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkerCrossValidator.java
index e9ce18d..f696503 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkerCrossValidator.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkerCrossValidator.java
@@ -18,8 +18,6 @@
 package opennlp.tools.chunker;
 
 import java.io.IOException;
-
-import opennlp.tools.util.InvalidFormatException;
 import opennlp.tools.util.ObjectStream;
 import opennlp.tools.util.TrainingParameters;
 import opennlp.tools.util.eval.CrossValidationPartitioner;
@@ -53,8 +51,8 @@ public class ChunkerCrossValidator {
    * @throws IOException
    */
   public void evaluate(ObjectStream<ChunkSample> samples, int nFolds)
-      throws IOException, InvalidFormatException {
-    CrossValidationPartitioner<ChunkSample> partitioner = new CrossValidationPartitioner<ChunkSample>(
+      throws IOException {
+    CrossValidationPartitioner<ChunkSample> partitioner = new CrossValidationPartitioner<>(
         samples, nFolds);
 
     while (partitioner.hasNext()) {

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkerEventStream.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkerEventStream.java b/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkerEventStream.java
index 118357c..d72d115 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkerEventStream.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkerEventStream.java
@@ -47,7 +47,7 @@ public class ChunkerEventStream extends AbstractEventStream<ChunkSample> {
   protected Iterator<Event> createEvents(ChunkSample sample) {
 
     if (sample != null) {
-      List<Event> events = new ArrayList<Event>();
+      List<Event> events = new ArrayList<>();
       String[] toksArray = sample.getSentence();
       String[] tagsArray = sample.getTags();
       String[] predsArray = sample.getPreds();

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkerFactory.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkerFactory.java b/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkerFactory.java
index c59be29..1cb772f 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkerFactory.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkerFactory.java
@@ -38,9 +38,7 @@ public class ChunkerFactory extends BaseToolFactory {
       return new ChunkerFactory();
     }
     try {
-      ChunkerFactory theFactory = ExtensionLoader.instantiateExtension(
-          ChunkerFactory.class, subclassName);
-      return theFactory;
+      return ExtensionLoader.instantiateExtension(ChunkerFactory.class, subclassName);
     } catch (Exception e) {
       String msg = "Could not instantiate the " + subclassName
           + ". The initialization throw an exception.";

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkerME.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkerME.java b/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkerME.java
index e5b1073..3ed4f9c 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkerME.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/chunker/ChunkerME.java
@@ -77,7 +77,7 @@ public class ChunkerME implements Chunker {
       this.model = model.getChunkerSequenceModel();
     }
     else {
-      this.model = new opennlp.tools.ml.BeamSearch<String>(beamSize,
+      this.model = new opennlp.tools.ml.BeamSearch<>(beamSize,
           model.getChunkerModel(), 0);
     }
   }
@@ -101,7 +101,7 @@ public class ChunkerME implements Chunker {
       this.model = model.getChunkerSequenceModel();
     }
     else {
-      this.model = new opennlp.tools.ml.BeamSearch<String>(beamSize,
+      this.model = new opennlp.tools.ml.BeamSearch<>(beamSize,
           model.getChunkerModel(), 0);
     }
   }
@@ -168,7 +168,7 @@ public class ChunkerME implements Chunker {
       beamSize = Integer.parseInt(beamSizeString);
     }
 
-    Map<String, String> manifestInfoEntries = new HashMap<String, String>();
+    Map<String, String> manifestInfoEntries = new HashMap<>();
 
     TrainerType trainerType = TrainerFactory.getTrainerType(mlParams.getSettings());
 

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/cmdline/ArgumentParser.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/cmdline/ArgumentParser.java b/opennlp-tools/src/main/java/opennlp/tools/cmdline/ArgumentParser.java
index cfc17aa..ca9a650 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/cmdline/ArgumentParser.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/cmdline/ArgumentParser.java
@@ -51,19 +51,19 @@ public class ArgumentParser {
 
   public @Retention(RetentionPolicy.RUNTIME)
   @interface OptionalParameter {
-    public static final String DEFAULT_CHARSET = "DEFAULT_CHARSET";
-    public String defaultValue() default "";
+    String DEFAULT_CHARSET = "DEFAULT_CHARSET";
+    String defaultValue() default "";
   }
 
   public @Retention(RetentionPolicy.RUNTIME)
   @interface ParameterDescription {
-    public String valueName();
-    public String description() default "";
+    String valueName();
+    String description() default "";
   }
 
   private interface ArgumentFactory {
 
-    static final String INVALID_ARG = "Invalid argument: %s %s \n";
+    String INVALID_ARG = "Invalid argument: %s %s \n";
 
     Object parseArgument(Method method, String argName, String argValue);
   }
@@ -148,7 +148,7 @@ public class ArgumentParser {
   private static final Map<Class<?>, ArgumentFactory> argumentFactories;
 
   static {
-    Map<Class<?>, ArgumentFactory> factories = new HashMap<Class<?>, ArgumentParser.ArgumentFactory>();
+    Map<Class<?>, ArgumentFactory> factories = new HashMap<>();
     factories.put(Integer.class, new IntegerArgumentFactory());
     factories.put(Boolean.class, new BooleanArgumentFactory());
     factories.put(String.class, new StringArgumentFactory());
@@ -205,9 +205,7 @@ public class ArgumentParser {
     // name length is checked to be at least 4 prior
     parameterNameChars[3] = Character.toLowerCase(parameterNameChars[3]);
 
-    String parameterName = "-" + new String(parameterNameChars).substring(3);
-
-    return parameterName;
+    return "-" + new String(parameterNameChars).substring(3);
   }
 
   /**
@@ -270,9 +268,9 @@ public class ArgumentParser {
   public static <T> List<Argument> createArguments(Class<T>... argProxyInterfaces) {
     checkProxyInterfaces(argProxyInterfaces);
 
-    Set<String> duplicateFilter = new HashSet<String>();
+    Set<String> duplicateFilter = new HashSet<>();
 
-    List<Argument> arguments = new LinkedList<Argument>();
+    List<Argument> arguments = new LinkedList<>();
 
     for (Class<T> argProxyInterface : argProxyInterfaces) {
       if (null != argProxyInterface) {
@@ -321,7 +319,7 @@ public class ArgumentParser {
   public static <T> String createUsage(Class<T>... argProxyInterfaces) {
     checkProxyInterfaces(argProxyInterfaces);
 
-    Set<String> duplicateFilter = new HashSet<String>();
+    Set<String> duplicateFilter = new HashSet<>();
 
     StringBuilder usage = new StringBuilder();
     StringBuilder details = new StringBuilder();
@@ -348,7 +346,7 @@ public class ArgumentParser {
 
             usage.append(paramName).append(' ').append(desc.valueName());
             details.append('\t').append(paramName).append(' ').append(desc.valueName()).append('\n');
-            if(desc.description() != null && desc.description().length() > 0) {
+            if(desc.description().length() > 0) {
               details.append("\t\t").append(desc.description()).append('\n');
             }
 
@@ -425,7 +423,7 @@ public class ArgumentParser {
     }
 
     int argumentCount = 0;
-    List<String> parameters = new ArrayList<String>(Arrays.asList(args));
+    List<String> parameters = new ArrayList<>(Arrays.asList(args));
 
     for (Class<T> argProxyInterface : argProxyInterfaces) {
       for (Method method : argProxyInterface.getMethods()) {
@@ -482,7 +480,7 @@ public class ArgumentParser {
     if (!validateArguments(args, argProxyInterface))
       throw new IllegalArgumentException("Passed args must be valid!");
 
-    Map<String, Object> arguments = new HashMap<String, Object>();
+    Map<String, Object> arguments = new HashMap<>();
 
     for (Method method : argProxyInterface.getMethods()) {
 
@@ -531,7 +529,7 @@ public class ArgumentParser {
    * @return arguments pertaining to argProxyInterface
    */
   public static <T> String[] filter(String args[], Class<T> argProxyInterface) {
-    ArrayList<String> parameters = new ArrayList<String>(args.length);
+    ArrayList<String> parameters = new ArrayList<>(args.length);
 
     for (Method method : argProxyInterface.getMethods()) {
 

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/cmdline/CLI.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/cmdline/CLI.java b/opennlp-tools/src/main/java/opennlp/tools/cmdline/CLI.java
index 6cfe0b7..7abb6d4 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/cmdline/CLI.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/cmdline/CLI.java
@@ -78,9 +78,9 @@ public final class CLI {
   private static Map<String, CmdLineTool> toolLookupMap;
 
   static {
-    toolLookupMap = new LinkedHashMap<String, CmdLineTool>();
+    toolLookupMap = new LinkedHashMap<>();
 
-    List<CmdLineTool> tools = new LinkedList<CmdLineTool>();
+    List<CmdLineTool> tools = new LinkedList<>();
 
     // Document Categorizer
     tools.add(new DoccatTool());

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/cmdline/DetailedFMeasureListener.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/cmdline/DetailedFMeasureListener.java b/opennlp-tools/src/main/java/opennlp/tools/cmdline/DetailedFMeasureListener.java
index ccf6884..4e3ec88 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/cmdline/DetailedFMeasureListener.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/cmdline/DetailedFMeasureListener.java
@@ -41,7 +41,7 @@ public abstract class DetailedFMeasureListener<T> implements
 
   private int samples = 0;
   private Stats generalStats = new Stats();
-  private Map<String, Stats> statsForOutcome = new HashMap<String, Stats>();
+  private Map<String, Stats> statsForOutcome = new HashMap<>();
 
   protected abstract Span[] asSpanArray(T sample);
 
@@ -59,8 +59,8 @@ public abstract class DetailedFMeasureListener<T> implements
     Span[] references = asSpanArray(reference);
     Span[] predictions = asSpanArray(prediction);
 
-    Set<Span> refSet = new HashSet<Span>(Arrays.asList(references));
-    Set<Span> predSet = new HashSet<Span>(Arrays.asList(predictions));
+    Set<Span> refSet = new HashSet<>(Arrays.asList(references));
+    Set<Span> predSet = new HashSet<>(Arrays.asList(predictions));
 
     for (Span ref : refSet) {
       if (predSet.contains(ref)) {
@@ -129,7 +129,7 @@ public abstract class DetailedFMeasureListener<T> implements
         zeroOrPositive(generalStats.getRecallScore() * 100),
         zeroOrPositive(generalStats.getFMeasure() * 100)));
     ret.append("\n");
-    SortedSet<String> set = new TreeSet<String>(new F1Comparator());
+    SortedSet<String> set = new TreeSet<>(new F1Comparator());
     set.addAll(statsForOutcome.keySet());
     for (String type : set) {
 

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/cmdline/EvaluationErrorPrinter.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/cmdline/EvaluationErrorPrinter.java b/opennlp-tools/src/main/java/opennlp/tools/cmdline/EvaluationErrorPrinter.java
index 6fda10b..51e1161 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/cmdline/EvaluationErrorPrinter.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/cmdline/EvaluationErrorPrinter.java
@@ -40,8 +40,8 @@ public abstract class EvaluationErrorPrinter<T> implements EvaluationMonitor<T>
   // for the sentence detector
   protected void printError(Span references[], Span predictions[],
       T referenceSample, T predictedSample, String sentence) {
-    List<Span> falseNegatives = new ArrayList<Span>();
-    List<Span> falsePositives = new ArrayList<Span>();
+    List<Span> falseNegatives = new ArrayList<>();
+    List<Span> falsePositives = new ArrayList<>();
 
     findErrors(references, predictions, falseNegatives, falsePositives);
 
@@ -57,8 +57,8 @@ public abstract class EvaluationErrorPrinter<T> implements EvaluationMonitor<T>
   // for namefinder, chunker...
   protected void printError(String id, Span references[], Span predictions[],
       T referenceSample, T predictedSample, String[] sentenceTokens) {
-    List<Span> falseNegatives = new ArrayList<Span>();
-    List<Span> falsePositives = new ArrayList<Span>();
+    List<Span> falseNegatives = new ArrayList<>();
+    List<Span> falsePositives = new ArrayList<>();
 
     findErrors(references, predictions, falseNegatives, falsePositives);
 
@@ -83,9 +83,9 @@ public abstract class EvaluationErrorPrinter<T> implements EvaluationMonitor<T>
   // for pos tagger
   protected void printError(String references[], String predictions[],
       T referenceSample, T predictedSample, String[] sentenceTokens) {
-    List<String> filteredDoc = new ArrayList<String>();
-    List<String> filteredRefs = new ArrayList<String>();
-    List<String> filteredPreds = new ArrayList<String>();
+    List<String> filteredDoc = new ArrayList<>();
+    List<String> filteredRefs = new ArrayList<>();
+    List<String> filteredPreds = new ArrayList<>();
 
     for (int i = 0; i < references.length; i++) {
       if (!references[i].equals(predictions[i])) {

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/cmdline/GenerateManualTool.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/cmdline/GenerateManualTool.java b/opennlp-tools/src/main/java/opennlp/tools/cmdline/GenerateManualTool.java
index b2a9c42..00074ea 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/cmdline/GenerateManualTool.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/cmdline/GenerateManualTool.java
@@ -45,7 +45,7 @@ public class GenerateManualTool {
     appendHeader(sb);
 
     // organize by package name
-    LinkedHashMap<String, Map<String, CmdLineTool>> packageNameToolMap = new LinkedHashMap<String, Map<String, CmdLineTool>>();
+    LinkedHashMap<String, Map<String, CmdLineTool>> packageNameToolMap = new LinkedHashMap<>();
     for (String toolName : CLI.getToolLookupMap().keySet()) {
       CmdLineTool tool = CLI.getToolLookupMap().get(toolName);
       String packageName = tool.getClass().getPackage().getName();

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/cmdline/ModelLoader.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/cmdline/ModelLoader.java b/opennlp-tools/src/main/java/opennlp/tools/cmdline/ModelLoader.java
index 3e5db54..561a6f8 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/cmdline/ModelLoader.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/cmdline/ModelLoader.java
@@ -43,8 +43,7 @@ public abstract class ModelLoader<T> {
     this.modelName = modelName;
   }
 
-  protected abstract T loadModel(InputStream modelIn) throws
-      IOException, InvalidFormatException;
+  protected abstract T loadModel(InputStream modelIn) throws IOException;
 
   public T load(File modelFile) {
 

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/cmdline/StreamFactoryRegistry.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/cmdline/StreamFactoryRegistry.java b/opennlp-tools/src/main/java/opennlp/tools/cmdline/StreamFactoryRegistry.java
index 9173c22..2573e2e 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/cmdline/StreamFactoryRegistry.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/cmdline/StreamFactoryRegistry.java
@@ -60,8 +60,7 @@ import opennlp.tools.formats.ontonotes.OntoNotesParseSampleStreamFactory;
  */
 public final class StreamFactoryRegistry {
 
-  private static final Map<Class, Map<String, ObjectStreamFactory>> registry =
-      new HashMap<Class, Map<String, ObjectStreamFactory>>();
+  private static final Map<Class, Map<String, ObjectStreamFactory>> registry = new HashMap<>();
 
   static {
     ChunkerSampleStreamFactory.registerFactory();
@@ -129,7 +128,7 @@ public final class StreamFactoryRegistry {
     boolean result;
     Map<String, ObjectStreamFactory> formats = registry.get(sampleClass);
     if (null == formats) {
-      formats = new HashMap<String, ObjectStreamFactory>();
+      formats = new HashMap<>();
     }
     if (!formats.containsKey(formatName)) {
       formats.put(formatName, factory);
@@ -198,10 +197,8 @@ public final class StreamFactoryRegistry {
 
         try {
           return (ObjectStreamFactory<T>) factoryClazz.newInstance();
-        } catch (InstantiationException e) {
+        } catch (InstantiationException | IllegalAccessException e) {
         	return null;
-        } catch (IllegalAccessException e) {
-          return null;
         }
 
       } catch (ClassNotFoundException e) {
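
The two duplicated catch blocks collapsed above use Java 7 multi-catch, which lets one handler cover several exception types. A minimal sketch (hypothetical reflective lookup, not OpenNLP code):

  public class MultiCatchSketch {

    // Returns a new instance of the named class, or null if it cannot be created.
    static Object tryInstantiate(String className) {
      try {
        return Class.forName(className).newInstance();
      } catch (InstantiationException | IllegalAccessException e) {
        // One handler replaces two identical catch blocks.
        return null;
      } catch (ClassNotFoundException e) {
        return null;
      }
    }

    public static void main(String[] args) {
      System.out.println(tryInstantiate("java.util.ArrayList")); // prints []
      System.out.println(tryInstantiate("no.such.Clazz"));       // prints null
    }
  }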

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/cmdline/chunker/ChunkerCrossValidatorTool.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/cmdline/chunker/ChunkerCrossValidatorTool.java b/opennlp-tools/src/main/java/opennlp/tools/cmdline/chunker/ChunkerCrossValidatorTool.java
index a4e0a4c..2724d88 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/cmdline/chunker/ChunkerCrossValidatorTool.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/cmdline/chunker/ChunkerCrossValidatorTool.java
@@ -57,7 +57,7 @@ public final class ChunkerCrossValidatorTool
       mlParams = ModelUtil.createDefaultTrainingParameters();
     }
 
-    List<EvaluationMonitor<ChunkSample>> listeners = new LinkedList<EvaluationMonitor<ChunkSample>>();
+    List<EvaluationMonitor<ChunkSample>> listeners = new LinkedList<>();
     ChunkerDetailedFMeasureListener detailedFMeasureListener = null;
     if (params.getMisclassified()) {
       listeners.add(new ChunkEvaluationErrorListener());

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/cmdline/chunker/ChunkerEvaluatorTool.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/cmdline/chunker/ChunkerEvaluatorTool.java b/opennlp-tools/src/main/java/opennlp/tools/cmdline/chunker/ChunkerEvaluatorTool.java
index 58d8703..01324cc 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/cmdline/chunker/ChunkerEvaluatorTool.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/cmdline/chunker/ChunkerEvaluatorTool.java
@@ -54,7 +54,7 @@ public final class ChunkerEvaluatorTool
 
     ChunkerModel model = new ChunkerModelLoader().load(params.getModel());
 
-    List<EvaluationMonitor<ChunkSample>> listeners = new LinkedList<EvaluationMonitor<ChunkSample>>();
+    List<EvaluationMonitor<ChunkSample>> listeners = new LinkedList<>();
     ChunkerDetailedFMeasureListener detailedFMeasureListener = null;
     if(params.getMisclassified()) {
       listeners.add(new ChunkEvaluationErrorListener());

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/cmdline/chunker/ChunkerMETool.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/cmdline/chunker/ChunkerMETool.java b/opennlp-tools/src/main/java/opennlp/tools/cmdline/chunker/ChunkerMETool.java
index 25c4465..5b87c9e 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/cmdline/chunker/ChunkerMETool.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/cmdline/chunker/ChunkerMETool.java
@@ -50,7 +50,7 @@ public class ChunkerMETool extends BasicCmdLineTool {
 
       ChunkerME chunker = new ChunkerME(model);
 
-      ObjectStream<String> lineStream = null;
+      ObjectStream<String> lineStream;
       PerformanceMonitor perfMon = null;
 
       try {

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/cmdline/doccat/DoccatFineGrainedReportListener.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/cmdline/doccat/DoccatFineGrainedReportListener.java b/opennlp-tools/src/main/java/opennlp/tools/cmdline/doccat/DoccatFineGrainedReportListener.java
index e07dc4f..0652d0b 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/cmdline/doccat/DoccatFineGrainedReportListener.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/cmdline/doccat/DoccatFineGrainedReportListener.java
@@ -26,14 +26,12 @@ import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.SortedSet;
 import java.util.TreeSet;
-
 import opennlp.tools.doccat.DoccatEvaluationMonitor;
 import opennlp.tools.doccat.DocumentSample;
 import opennlp.tools.util.Span;
@@ -330,7 +328,7 @@ public class DoccatFineGrainedReportListener implements DoccatEvaluationMonitor
     printStream.append("\n");
   }
 
-  private static final String generateAlphaLabel(int index) {
+  private static String generateAlphaLabel(int index) {
 
     char labelChars[] = new char[3];
     int i;
@@ -355,20 +353,20 @@ public class DoccatFineGrainedReportListener implements DoccatEvaluationMonitor
     private int maximumSentenceLength = Integer.MIN_VALUE;
 
     // token statistics
-    private final Map<String, Mean> tokAccuracies = new HashMap<String, Mean>();
-    private final Map<String, Counter> tokOcurrencies = new HashMap<String, Counter>();
-    private final Map<String, Counter> tokErrors = new HashMap<String, Counter>();
+    private final Map<String, Mean> tokAccuracies = new HashMap<>();
+    private final Map<String, Counter> tokOcurrencies = new HashMap<>();
+    private final Map<String, Counter> tokErrors = new HashMap<>();
 
     // tag statistics
-    private final Map<String, Counter> tagOcurrencies = new HashMap<String, Counter>();
-    private final Map<String, Counter> tagErrors = new HashMap<String, Counter>();
-    private final Map<String, FMeasure> tagFMeasure = new HashMap<String, FMeasure>();
+    private final Map<String, Counter> tagOcurrencies = new HashMap<>();
+    private final Map<String, Counter> tagErrors = new HashMap<>();
+    private final Map<String, FMeasure> tagFMeasure = new HashMap<>();
 
     // represents a Confusion Matrix that aggregates all tokens
-    private final Map<String, ConfusionMatrixLine> generalConfusionMatrix = new HashMap<String, ConfusionMatrixLine>();
+    private final Map<String, ConfusionMatrixLine> generalConfusionMatrix = new HashMap<>();
 
     // represents a set of Confusion Matrix for each token
-    private final Map<String, Map<String, ConfusionMatrixLine>> tokenConfusionMatrix = new HashMap<String, Map<String, ConfusionMatrixLine>>();
+    private final Map<String, Map<String, ConfusionMatrixLine>> tokenConfusionMatrix = new HashMap<>();
 
     public void add(DocumentSample reference, DocumentSample prediction) {
       int length = reference.getText().length;
@@ -447,13 +445,13 @@ public class DoccatFineGrainedReportListener implements DoccatEvaluationMonitor
 
     private void updateTagFMeasure(String[] refs, String[] preds) {
       // create a set with all tags
-      Set<String> tags = new HashSet<String>(Arrays.asList(refs));
+      Set<String> tags = new HashSet<>(Arrays.asList(refs));
       tags.addAll(Arrays.asList(preds));
 
       // create samples for each tag
       for (String tag : tags) {
-        List<Span> reference = new ArrayList<Span>();
-        List<Span> prediction = new ArrayList<Span>();
+        List<Span> reference = new ArrayList<>();
+        List<Span> prediction = new ArrayList<>();
         for (int i = 0; i < refs.length; i++) {
           if (refs[i].equals(tag)) {
             reference.add(new Span(i, i + 1));
@@ -509,7 +507,7 @@ public class DoccatFineGrainedReportListener implements DoccatEvaluationMonitor
     }
 
     public SortedSet<String> getTokensOrderedByFrequency() {
-      SortedSet<String> toks = new TreeSet<String>(new Comparator<String>() {
+      SortedSet<String> toks = new TreeSet<>(new Comparator<String>() {
         public int compare(String o1, String o2) {
           if (o1.equals(o2)) {
             return 0;
@@ -532,7 +530,7 @@ public class DoccatFineGrainedReportListener implements DoccatEvaluationMonitor
     }
 
     public SortedSet<String> getTokensOrderedByNumberOfErrors() {
-      SortedSet<String> toks = new TreeSet<String>(new Comparator<String>() {
+      SortedSet<String> toks = new TreeSet<>(new Comparator<String>() {
         public int compare(String o1, String o2) {
           if (o1.equals(o2)) {
             return 0;
@@ -573,7 +571,7 @@ public class DoccatFineGrainedReportListener implements DoccatEvaluationMonitor
     }
 
     public SortedSet<String> getTagsOrderedByErrors() {
-      SortedSet<String> tags = new TreeSet<String>(new Comparator<String>() {
+      SortedSet<String> tags = new TreeSet<>(new Comparator<String>() {
         public int compare(String o1, String o2) {
           if (o1.equals(o2)) {
             return 0;
@@ -638,9 +636,9 @@ public class DoccatFineGrainedReportListener implements DoccatEvaluationMonitor
 
     private SortedSet<String> getConfusionMatrixTagset(
         Map<String, ConfusionMatrixLine> data) {
-      SortedSet<String> tags = new TreeSet<String>(new CategoryComparator(data));
+      SortedSet<String> tags = new TreeSet<>(new CategoryComparator(data));
       tags.addAll(data.keySet());
-      List<String> col = new LinkedList<String>();
+      List<String> col = new LinkedList<>();
       for (String t : tags) {
         col.addAll(data.get(t).line.keySet());
       }
@@ -693,7 +691,7 @@ public class DoccatFineGrainedReportListener implements DoccatEvaluationMonitor
    */
   private static class ConfusionMatrixLine {
 
-    private Map<String, Counter> line = new HashMap<String, Counter>();
+    private Map<String, Counter> line = new HashMap<>();
     private String ref;
     private int total = 0;
     private int correct = 0;

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/cmdline/doccat/DoccatTool.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/cmdline/doccat/DoccatTool.java b/opennlp-tools/src/main/java/opennlp/tools/cmdline/doccat/DoccatTool.java
index d41dabb..dbc6e6b 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/cmdline/doccat/DoccatTool.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/cmdline/doccat/DoccatTool.java
@@ -54,7 +54,7 @@ public class DoccatTool extends BasicCmdLineTool {
 
       DocumentCategorizerME doccat = new DocumentCategorizerME(model);
 
-      /**
+      /*
        * moved initialization to the try block to catch new IOException
        */
       ObjectStream<String> documentStream;

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/cmdline/doccat/DoccatTrainerTool.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/cmdline/doccat/DoccatTrainerTool.java b/opennlp-tools/src/main/java/opennlp/tools/cmdline/doccat/DoccatTrainerTool.java
index 421c57f..91ee019 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/cmdline/doccat/DoccatTrainerTool.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/cmdline/doccat/DoccatTrainerTool.java
@@ -99,8 +99,7 @@ public class DoccatTrainerTool
 
   static FeatureGenerator[] createFeatureGenerators(String featureGeneratorsNames) {
     if(featureGeneratorsNames == null) {
-      FeatureGenerator[] def = {new BagOfWordsFeatureGenerator()};
-      return def;
+      return new FeatureGenerator[]{new BagOfWordsFeatureGenerator()};
     }
     String[] classes = featureGeneratorsNames.split(",");
     FeatureGenerator[] featureGenerators = new FeatureGenerator[classes.length];

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/cmdline/entitylinker/EntityLinkerTool.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/cmdline/entitylinker/EntityLinkerTool.java b/opennlp-tools/src/main/java/opennlp/tools/cmdline/entitylinker/EntityLinkerTool.java
index dbdb27a..f82e362 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/cmdline/entitylinker/EntityLinkerTool.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/cmdline/entitylinker/EntityLinkerTool.java
@@ -84,7 +84,7 @@ public class EntityLinkerTool extends BasicCmdLineTool {
         ObjectStream<String> untokenizedLineStream = new PlainTextByLineStream(
             new SystemInputStreamFactory(), SystemInputStreamFactory.encoding());
 
-        List<NameSample> document = new ArrayList<NameSample>();
+        List<NameSample> document = new ArrayList<>();
 
         String line;
         while ((line = untokenizedLineStream.read()) != null) {

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/cmdline/lemmatizer/LemmatizerFineGrainedReportListener.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/cmdline/lemmatizer/LemmatizerFineGrainedReportListener.java b/opennlp-tools/src/main/java/opennlp/tools/cmdline/lemmatizer/LemmatizerFineGrainedReportListener.java
index 7af495c..b72e889 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/cmdline/lemmatizer/LemmatizerFineGrainedReportListener.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/cmdline/lemmatizer/LemmatizerFineGrainedReportListener.java
@@ -459,7 +459,7 @@ public class LemmatizerFineGrainedReportListener
     printStream.append("\n");
   }
 
-  private static final String generateAlphaLabel(int index) {
+  private static String generateAlphaLabel(int index) {
 
     char labelChars[] = new char[3];
     int i;
@@ -484,20 +484,20 @@ public class LemmatizerFineGrainedReportListener
     private int maximumSentenceLength = Integer.MIN_VALUE;
 
     // token statistics
-    private final Map<String, Mean> tokAccuracies = new HashMap<String, Mean>();
-    private final Map<String, Counter> tokOcurrencies = new HashMap<String, Counter>();
-    private final Map<String, Counter> tokErrors = new HashMap<String, Counter>();
+    private final Map<String, Mean> tokAccuracies = new HashMap<>();
+    private final Map<String, Counter> tokOcurrencies = new HashMap<>();
+    private final Map<String, Counter> tokErrors = new HashMap<>();
 
     // tag statistics
-    private final Map<String, Counter> tagOcurrencies = new HashMap<String, Counter>();
-    private final Map<String, Counter> tagErrors = new HashMap<String, Counter>();
-    private final Map<String, FMeasure> tagFMeasure = new HashMap<String, FMeasure>();
+    private final Map<String, Counter> tagOcurrencies = new HashMap<>();
+    private final Map<String, Counter> tagErrors = new HashMap<>();
+    private final Map<String, FMeasure> tagFMeasure = new HashMap<>();
 
     // represents a Confusion Matrix that aggregates all tokens
-    private final Map<String, ConfusionMatrixLine> generalConfusionMatrix = new HashMap<String, ConfusionMatrixLine>();
+    private final Map<String, ConfusionMatrixLine> generalConfusionMatrix = new HashMap<>();
 
     // represents a set of Confusion Matrix for each token
-    private final Map<String, Map<String, ConfusionMatrixLine>> tokenConfusionMatrix = new HashMap<String, Map<String, ConfusionMatrixLine>>();
+    private final Map<String, Map<String, ConfusionMatrixLine>> tokenConfusionMatrix = new HashMap<>();
 
     public void add(LemmaSample reference, LemmaSample prediction) {
       int length = reference.getTokens().length;
@@ -576,13 +576,13 @@ public class LemmatizerFineGrainedReportListener
 
     private void updateTagFMeasure(String[] refs, String[] preds) {
       // create a set with all tags
-      Set<String> tags = new HashSet<String>(Arrays.asList(refs));
+      Set<String> tags = new HashSet<>(Arrays.asList(refs));
       tags.addAll(Arrays.asList(preds));
 
       // create samples for each tag
       for (String tag : tags) {
-        List<Span> reference = new ArrayList<Span>();
-        List<Span> prediction = new ArrayList<Span>();
+        List<Span> reference = new ArrayList<>();
+        List<Span> prediction = new ArrayList<>();
         for (int i = 0; i < refs.length; i++) {
           if (refs[i].equals(tag)) {
             reference.add(new Span(i, i + 1));
@@ -638,7 +638,7 @@ public class LemmatizerFineGrainedReportListener
     }
 
     public SortedSet<String> getTokensOrderedByFrequency() {
-      SortedSet<String> toks = new TreeSet<String>(new Comparator<String>() {
+      SortedSet<String> toks = new TreeSet<>(new Comparator<String>() {
         public int compare(String o1, String o2) {
           if (o1.equals(o2)) {
             return 0;
@@ -661,7 +661,7 @@ public class LemmatizerFineGrainedReportListener
     }
 
     public SortedSet<String> getTokensOrderedByNumberOfErrors() {
-      SortedSet<String> toks = new TreeSet<String>(new Comparator<String>() {
+      SortedSet<String> toks = new TreeSet<>(new Comparator<String>() {
         public int compare(String o1, String o2) {
           if (o1.equals(o2)) {
             return 0;
@@ -702,7 +702,7 @@ public class LemmatizerFineGrainedReportListener
     }
 
     public SortedSet<String> getTagsOrderedByErrors() {
-      SortedSet<String> tags = new TreeSet<String>(new Comparator<String>() {
+      SortedSet<String> tags = new TreeSet<>(new Comparator<String>() {
         public int compare(String o1, String o2) {
           if (o1.equals(o2)) {
             return 0;
@@ -767,10 +767,10 @@ public class LemmatizerFineGrainedReportListener
 
     private SortedSet<String> getConfusionMatrixTagset(
         Map<String, ConfusionMatrixLine> data) {
-      SortedSet<String> tags = new TreeSet<String>(
+      SortedSet<String> tags = new TreeSet<>(
           new CategoryComparator(data));
       tags.addAll(data.keySet());
-      List<String> col = new LinkedList<String>();
+      List<String> col = new LinkedList<>();
       for (String t : tags) {
         col.addAll(data.get(t).line.keySet());
       }
@@ -824,7 +824,7 @@ public class LemmatizerFineGrainedReportListener
    */
   private static class ConfusionMatrixLine {
 
-    private Map<String, Counter> line = new HashMap<String, Counter>();
+    private Map<String, Counter> line = new HashMap<>();
     private String ref;
     private int total = 0;
     private int correct = 0;
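
Note: besides the diamond conversions, the updateTagFMeasure hunk above shows how the listener scores per-tag performance. It takes the union of the tags seen in the reference and the prediction, then turns every position where a tag occurs into a one-token span before handing the spans to the F-measure bookkeeping. A self-contained sketch of that conversion step; it uses plain int[] start/end pairs instead of opennlp.tools.util.Span so it runs on its own, and all names are illustrative:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;

    public class TagSpanExample {

      /** Collects, per tag, the [start, end) positions at which it occurs. */
      static Map<String, List<int[]>> spansPerTag(String[] tags) {
        Map<String, List<int[]>> spans = new HashMap<>();
        for (int i = 0; i < tags.length; i++) {
          List<int[]> list = spans.get(tags[i]);
          if (list == null) {
            list = new ArrayList<>();
            spans.put(tags[i], list);
          }
          list.add(new int[] {i, i + 1});  // one-token span, end exclusive
        }
        return spans;
      }

      public static void main(String[] args) {
        String[] refs  = {"NN", "VB", "NN"};
        String[] preds = {"NN", "NN", "NN"};

        // union of tags from both sequences, as in the listener code
        Set<String> tags = new HashSet<>(Arrays.asList(refs));
        tags.addAll(Arrays.asList(preds));

        Map<String, List<int[]>> refSpans  = spansPerTag(refs);
        Map<String, List<int[]>> predSpans = spansPerTag(preds);

        for (String tag : tags) {
          List<int[]> r = refSpans.containsKey(tag) ? refSpans.get(tag) : Collections.<int[]>emptyList();
          List<int[]> p = predSpans.containsKey(tag) ? predSpans.get(tag) : Collections.<int[]>emptyList();
          System.out.println(tag + ": " + r.size() + " reference span(s), "
              + p.size() + " predicted span(s)");
        }
      }
    }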

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/cmdline/lemmatizer/LemmatizerMETool.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/cmdline/lemmatizer/LemmatizerMETool.java b/opennlp-tools/src/main/java/opennlp/tools/cmdline/lemmatizer/LemmatizerMETool.java
index 8805bca..966111a 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/cmdline/lemmatizer/LemmatizerMETool.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/cmdline/lemmatizer/LemmatizerMETool.java
@@ -51,7 +51,7 @@ public class LemmatizerMETool extends BasicCmdLineTool {
 
       LemmatizerME lemmatizer = new LemmatizerME(model);
 
-      ObjectStream<String> lineStream = null;
+      ObjectStream<String> lineStream;
       PerformanceMonitor perfMon = null;
 
       try {
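
Note: this hunk, like the identical ones in POSTaggerTool and CommandLineTokenizer further down, removes a redundant "= null" initializer. Javac's definite-assignment analysis already guarantees that the local is assigned before it is read; pre-assigning null only turns a forgotten assignment from a compile-time error into a NullPointerException at run time. A tiny sketch with an illustrative variable instead of the tool's ObjectStream:

    public class DefiniteAssignmentExample {

      public static void main(String[] args) {
        // No "= null" needed: the compiler accepts the bare declaration as
        // long as every path that reads the variable has assigned it first.
        int port;
        try {
          port = Integer.parseInt(args.length > 0 ? args[0] : "8080");
        } catch (NumberFormatException e) {
          // This path never reaches the read below, because it throws.
          throw new IllegalArgumentException("not a port number", e);
        }
        System.out.println("using port " + port);
      }
    }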

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/cmdline/namefind/TokenNameFinderTool.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/cmdline/namefind/TokenNameFinderTool.java b/opennlp-tools/src/main/java/opennlp/tools/cmdline/namefind/TokenNameFinderTool.java
index 7f05a78..6e8b86f 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/cmdline/namefind/TokenNameFinderTool.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/cmdline/namefind/TokenNameFinderTool.java
@@ -81,7 +81,7 @@ public final class TokenNameFinderTool extends BasicCmdLineTool {
             }
           }
 
-          List<Span> names = new ArrayList<Span>();
+          List<Span> names = new ArrayList<>();
 
           for (TokenNameFinder nameFinder : nameFinders) {
             Collections.addAll(names, nameFinder.find(whitespaceTokenizerLine));

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/cmdline/postag/POSModelLoader.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/cmdline/postag/POSModelLoader.java b/opennlp-tools/src/main/java/opennlp/tools/cmdline/postag/POSModelLoader.java
index 2bdfe7b..40632cf 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/cmdline/postag/POSModelLoader.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/cmdline/postag/POSModelLoader.java
@@ -19,10 +19,8 @@ package opennlp.tools.cmdline.postag;
 
 import java.io.IOException;
 import java.io.InputStream;
-
 import opennlp.tools.cmdline.ModelLoader;
 import opennlp.tools.postag.POSModel;
-import opennlp.tools.util.InvalidFormatException;
 
 /**
  * Loads a POS Tagger Model for the command line tools.
@@ -36,8 +34,7 @@ public final class POSModelLoader extends ModelLoader<POSModel>{
   }
 
   @Override
-  protected POSModel loadModel(InputStream modelIn) throws IOException,
-      InvalidFormatException {
+  protected POSModel loadModel(InputStream modelIn) throws IOException {
     return new POSModel(modelIn);
   }
 }
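
Note: the shortened throws clause above is not a behaviour change. opennlp.tools.util.InvalidFormatException extends IOException, so listing both was redundant; the more general type already covers it. A self-contained sketch of the pattern; the local InvalidFormatException below is a stand-in declared only to keep the example compilable on its own:

    import java.io.IOException;

    public class ThrowsClauseExample {

      // Stand-in mirroring the OpenNLP class, which extends IOException.
      static class InvalidFormatException extends IOException {
        InvalidFormatException(String message) {
          super(message);
        }
      }

      // Declaring only the supertype is enough; the subtype can still be
      // thrown, and callers can still catch it separately if they care.
      static void load(byte[] model) throws IOException {
        if (model.length == 0) {
          throw new InvalidFormatException("model data is empty");
        }
        // ... parse the model bytes ...
      }

      public static void main(String[] args) {
        try {
          load(new byte[0]);
        } catch (InvalidFormatException e) {
          System.out.println("bad format: " + e.getMessage());
        } catch (IOException e) {
          System.out.println("I/O failure: " + e.getMessage());
        }
      }
    }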

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/cmdline/postag/POSTaggerFineGrainedReportListener.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/cmdline/postag/POSTaggerFineGrainedReportListener.java b/opennlp-tools/src/main/java/opennlp/tools/cmdline/postag/POSTaggerFineGrainedReportListener.java
index 1a40a97..4149c97 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/cmdline/postag/POSTaggerFineGrainedReportListener.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/cmdline/postag/POSTaggerFineGrainedReportListener.java
@@ -466,7 +466,7 @@ public class POSTaggerFineGrainedReportListener implements
     printStream.append("\n");
   }
 
-  private static final String generateAlphaLabel(int index) {
+  private static String generateAlphaLabel(int index) {
 
     char labelChars[] = new char[3];
     int i;
@@ -491,20 +491,20 @@ public class POSTaggerFineGrainedReportListener implements
     private int maximumSentenceLength = Integer.MIN_VALUE;
 
     // token statistics
-    private final Map<String, Mean> tokAccuracies = new HashMap<String, Mean>();
-    private final Map<String, Counter> tokOcurrencies = new HashMap<String, Counter>();
-    private final Map<String, Counter> tokErrors = new HashMap<String, Counter>();
+    private final Map<String, Mean> tokAccuracies = new HashMap<>();
+    private final Map<String, Counter> tokOcurrencies = new HashMap<>();
+    private final Map<String, Counter> tokErrors = new HashMap<>();
 
     // tag statistics
-    private final Map<String, Counter> tagOcurrencies = new HashMap<String, Counter>();
-    private final Map<String, Counter> tagErrors = new HashMap<String, Counter>();
-    private final Map<String, FMeasure> tagFMeasure = new HashMap<String, FMeasure>();
+    private final Map<String, Counter> tagOcurrencies = new HashMap<>();
+    private final Map<String, Counter> tagErrors = new HashMap<>();
+    private final Map<String, FMeasure> tagFMeasure = new HashMap<>();
 
     // represents a Confusion Matrix that aggregates all tokens
-    private final Map<String, ConfusionMatrixLine> generalConfusionMatrix = new HashMap<String, ConfusionMatrixLine>();
+    private final Map<String, ConfusionMatrixLine> generalConfusionMatrix = new HashMap<>();
 
     // represents a set of Confusion Matrix for each token
-    private final Map<String, Map<String, ConfusionMatrixLine>> tokenConfusionMatrix = new HashMap<String, Map<String, ConfusionMatrixLine>>();
+    private final Map<String, Map<String, ConfusionMatrixLine>> tokenConfusionMatrix = new HashMap<>();
 
     public void add(POSSample reference, POSSample prediction) {
       int length = reference.getSentence().length;
@@ -583,13 +583,13 @@ public class POSTaggerFineGrainedReportListener implements
 
     private void updateTagFMeasure(String[] refs, String[] preds) {
       // create a set with all tags
-      Set<String> tags = new HashSet<String>(Arrays.asList(refs));
+      Set<String> tags = new HashSet<>(Arrays.asList(refs));
       tags.addAll(Arrays.asList(preds));
 
       // create samples for each tag
       for (String tag : tags) {
-        List<Span> reference = new ArrayList<Span>();
-        List<Span> prediction = new ArrayList<Span>();
+        List<Span> reference = new ArrayList<>();
+        List<Span> prediction = new ArrayList<>();
         for (int i = 0; i < refs.length; i++) {
           if (refs[i].equals(tag)) {
             reference.add(new Span(i, i + 1));
@@ -645,7 +645,7 @@ public class POSTaggerFineGrainedReportListener implements
     }
 
     public SortedSet<String> getTokensOrderedByFrequency() {
-      SortedSet<String> toks = new TreeSet<String>(new Comparator<String>() {
+      SortedSet<String> toks = new TreeSet<>(new Comparator<String>() {
         public int compare(String o1, String o2) {
           if (o1.equals(o2)) {
             return 0;
@@ -668,7 +668,7 @@ public class POSTaggerFineGrainedReportListener implements
     }
 
     public SortedSet<String> getTokensOrderedByNumberOfErrors() {
-      SortedSet<String> toks = new TreeSet<String>(new Comparator<String>() {
+      SortedSet<String> toks = new TreeSet<>(new Comparator<String>() {
         public int compare(String o1, String o2) {
           if (o1.equals(o2)) {
             return 0;
@@ -709,7 +709,7 @@ public class POSTaggerFineGrainedReportListener implements
     }
 
     public SortedSet<String> getTagsOrderedByErrors() {
-      SortedSet<String> tags = new TreeSet<String>(new Comparator<String>() {
+      SortedSet<String> tags = new TreeSet<>(new Comparator<String>() {
         public int compare(String o1, String o2) {
           if (o1.equals(o2)) {
             return 0;
@@ -764,8 +764,7 @@ public class POSTaggerFineGrainedReportListener implements
           column++;
         }
         // set accuracy
-        matrix[line][column] = data.get(ref) != null ? data.get(ref)
-            .getAccuracy() : 0;
+        matrix[line][column] = data.get(ref) != null ? data.get(ref).getAccuracy() : 0;
         line++;
       }
 
@@ -774,9 +773,9 @@ public class POSTaggerFineGrainedReportListener implements
 
     private SortedSet<String> getConfusionMatrixTagset(
         Map<String, ConfusionMatrixLine> data) {
-      SortedSet<String> tags = new TreeSet<String>(new CategoryComparator(data));
+      SortedSet<String> tags = new TreeSet<>(new CategoryComparator(data));
       tags.addAll(data.keySet());
-      List<String> col = new LinkedList<String>();
+      List<String> col = new LinkedList<>();
       for (String t : tags) {
         col.addAll(data.get(t).line.keySet());
       }
@@ -829,7 +828,7 @@ public class POSTaggerFineGrainedReportListener implements
    */
   private static class ConfusionMatrixLine {
 
-    private Map<String, Counter> line = new HashMap<String, Counter>();
+    private Map<String, Counter> line = new HashMap<>();
     private String ref;
     private int total = 0;
     private int correct = 0;

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/cmdline/postag/POSTaggerTool.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/cmdline/postag/POSTaggerTool.java b/opennlp-tools/src/main/java/opennlp/tools/cmdline/postag/POSTaggerTool.java
index d2ceab8..2c4c661 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/cmdline/postag/POSTaggerTool.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/cmdline/postag/POSTaggerTool.java
@@ -51,7 +51,7 @@ public final class POSTaggerTool extends BasicCmdLineTool {
 
       POSTaggerME tagger = new POSTaggerME(model);
 
-      ObjectStream<String> lineStream = null;
+      ObjectStream<String> lineStream;
       PerformanceMonitor perfMon = null;
 
       try {

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/cmdline/postag/POSTaggerTrainerTool.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/cmdline/postag/POSTaggerTrainerTool.java b/opennlp-tools/src/main/java/opennlp/tools/cmdline/postag/POSTaggerTrainerTool.java
index 42e4aa1..c04b5ef 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/cmdline/postag/POSTaggerTrainerTool.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/cmdline/postag/POSTaggerTrainerTool.java
@@ -85,7 +85,7 @@ public final class POSTaggerTrainerTool
       System.err.println("done");
     }
 
-    POSTaggerFactory postaggerFactory = null;
+    POSTaggerFactory postaggerFactory;
     try {
       postaggerFactory = POSTaggerFactory.create(params.getFactory(), ngramDict, null);
     } catch (InvalidFormatException e) {
@@ -149,17 +149,19 @@ public final class POSTaggerTrainerTool
     if (modelString == null)
       modelString = "maxent";
 
-    if (modelString.equals("maxent")) {
-      model = ModelType.MAXENT;
-    }
-    else if (modelString.equals("perceptron")) {
-      model = ModelType.PERCEPTRON;
-    }
-    else if (modelString.equals("perceptron_sequence")) {
-      model = ModelType.PERCEPTRON_SEQUENCE;
-    }
-    else {
-      model = null;
+    switch (modelString) {
+      case "maxent":
+        model = ModelType.MAXENT;
+        break;
+      case "perceptron":
+        model = ModelType.PERCEPTRON;
+        break;
+      case "perceptron_sequence":
+        model = ModelType.PERCEPTRON_SEQUENCE;
+        break;
+      default:
+        model = null;
+        break;
     }
     return model;
   }
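
Note: the hunk above swaps an if/else-if chain for a switch over the model-type string, which Java 7 allows. Matching is done with String.equals and is case sensitive, and a null selector throws a NullPointerException, which is why the null check stays in front. A compact sketch with a local enum standing in for the real ModelType:

    public class StringSwitchExample {

      enum ModelType { MAXENT, PERCEPTRON, PERCEPTRON_SEQUENCE }  // illustrative stand-in

      static ModelType parse(String name) {
        if (name == null) {          // switching on null would throw NPE
          name = "maxent";
        }
        switch (name) {
          case "maxent":
            return ModelType.MAXENT;
          case "perceptron":
            return ModelType.PERCEPTRON;
          case "perceptron_sequence":
            return ModelType.PERCEPTRON_SEQUENCE;
          default:
            return null;             // unknown type, mirroring the tool above
        }
      }

      public static void main(String[] args) {
        System.out.println(parse("perceptron"));  // PERCEPTRON
        System.out.println(parse("bogus"));       // null
      }
    }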

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/cmdline/tokenizer/CommandLineTokenizer.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/cmdline/tokenizer/CommandLineTokenizer.java b/opennlp-tools/src/main/java/opennlp/tools/cmdline/tokenizer/CommandLineTokenizer.java
index 2c537a9..4c81d93 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/cmdline/tokenizer/CommandLineTokenizer.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/cmdline/tokenizer/CommandLineTokenizer.java
@@ -36,9 +36,9 @@ final class CommandLineTokenizer {
   }
 
   void process() {
-    ObjectStream<String> untokenizedLineStream = null;
+    ObjectStream<String> untokenizedLineStream;
 
-    ObjectStream<String> tokenizedLineStream = null;
+    ObjectStream<String> tokenizedLineStream;
     PerformanceMonitor perfMon = null;
     try {
       untokenizedLineStream =

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/dictionary/Dictionary.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/dictionary/Dictionary.java b/opennlp-tools/src/main/java/opennlp/tools/dictionary/Dictionary.java
index aa1fef8..58b7a6e 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/dictionary/Dictionary.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/dictionary/Dictionary.java
@@ -28,12 +28,10 @@ import java.util.HashSet;
 import java.util.Iterator;
 import java.util.Set;
 import java.util.StringTokenizer;
-
 import opennlp.tools.dictionary.serializer.Attributes;
 import opennlp.tools.dictionary.serializer.DictionarySerializer;
 import opennlp.tools.dictionary.serializer.Entry;
 import opennlp.tools.dictionary.serializer.EntryInserter;
-import opennlp.tools.util.InvalidFormatException;
 import opennlp.tools.util.StringList;
 import opennlp.tools.util.StringUtil;
 
@@ -91,7 +89,7 @@ public class Dictionary implements Iterable<StringList> {
     }
   }
 
-  private Set<StringListWrapper> entrySet = new HashSet<StringListWrapper>();
+  private Set<StringListWrapper> entrySet = new HashSet<>();
   private final boolean isCaseSensitive;
   private int minTokenCount = 99999;
   private int maxTokenCount = 0;
@@ -113,9 +111,8 @@ public class Dictionary implements Iterable<StringList> {
    *
    * @param in
    * @throws IOException
-   * @throws InvalidFormatException
    */
-  public Dictionary(InputStream in) throws IOException, InvalidFormatException {
+  public Dictionary(InputStream in) throws IOException {
     isCaseSensitive = DictionarySerializer.create(in, new EntryInserter() {
       public void insert(Entry entry) {
         put(entry.getTokens());
@@ -136,9 +133,8 @@ public class Dictionary implements Iterable<StringList> {
    * @param caseSensitive
    *          has no effect
    * @throws IOException
-   * @throws InvalidFormatException
    */
-  public Dictionary(InputStream in, boolean caseSensitive) throws IOException, InvalidFormatException {
+  public Dictionary(InputStream in, boolean caseSensitive) throws IOException {
     this(in);
   }
 
@@ -173,7 +169,6 @@ public class Dictionary implements Iterable<StringList> {
    * Checks if this dictionary has the given entry.
    *
    * @param tokens
-   *
    * @return true if it contains the entry otherwise false
    */
   public boolean contains(StringList tokens) {

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/dictionary/Index.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/dictionary/Index.java b/opennlp-tools/src/main/java/opennlp/tools/dictionary/Index.java
index df4e9c5..7d1245e 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/dictionary/Index.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/dictionary/Index.java
@@ -30,7 +30,7 @@ import opennlp.tools.util.StringList;
  */
 public class Index {
 
-  private Set<String> tokens = new HashSet<String>();
+  private Set<String> tokens = new HashSet<>();
 
   /**
    * Initializes the current instance with the given

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/dictionary/serializer/Attributes.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/dictionary/serializer/Attributes.java b/opennlp-tools/src/main/java/opennlp/tools/dictionary/serializer/Attributes.java
index 1b2b488..cd737c8 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/dictionary/serializer/Attributes.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/dictionary/serializer/Attributes.java
@@ -30,7 +30,7 @@ import java.util.Map;
  */
 public class Attributes {
 
-  private Map<String, String> mNameValueMap = new HashMap<String, String>();
+  private Map<String, String> mNameValueMap = new HashMap<>();
 
   /**
    * Retrieves the value for the given key or null if attribute it not set.

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/dictionary/serializer/DictionarySerializer.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/dictionary/serializer/DictionarySerializer.java b/opennlp-tools/src/main/java/opennlp/tools/dictionary/serializer/DictionarySerializer.java
index feb96a3..a0b3dcb 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/dictionary/serializer/DictionarySerializer.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/dictionary/serializer/DictionarySerializer.java
@@ -59,7 +59,7 @@ public class DictionarySerializer {
     private boolean mIsInsideTokenElement;
     private boolean mIsCaseSensitiveDictionary;
 
-    private List<String> mTokenList = new LinkedList<String>();
+    private List<String> mTokenList = new LinkedList<>();
 
     private StringBuilder token = new StringBuilder();
 
@@ -209,7 +209,7 @@ public class DictionarySerializer {
    * @throws InvalidFormatException
    */
   public static boolean create(InputStream in, EntryInserter inserter)
-      throws IOException, InvalidFormatException {
+      throws IOException {
 
     DictionaryContenthandler profileContentHandler =
         new DictionaryContenthandler(inserter);
@@ -299,8 +299,7 @@ public class DictionarySerializer {
       hd.endDocument();
     }
     catch (SAXException e) {
-      //TODO update after Java6 upgrade
-      throw (IOException) new IOException("Error during serialization: " + e.getMessage()).initCause(e);
+      throw new IOException("Error during serialization: " + e.getMessage(), e);
     }
   }
 
@@ -320,14 +319,11 @@ public class DictionarySerializer {
 
     StringList tokens = entry.getTokens();
 
-    for (Iterator<String> it = tokens.iterator(); it.hasNext(); ) {
+    for (String token1 : tokens) {
 
       hd.startElement("", "", TOKEN_ELEMENT, new AttributesImpl());
 
-      String token = it.next();
-
-      hd.characters(token.toCharArray(),
-          0, token.length());
+      hd.characters(token1.toCharArray(), 0, token1.length());
 
       hd.endElement("", "", TOKEN_ELEMENT);
     }
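
Note: two separate idioms appear in the DictionarySerializer hunks above. First, since Java 6 IOException has a (message, cause) constructor, so the old cast-plus-initCause workaround is no longer needed. Second, the explicit Iterator loop over the entry's tokens becomes an enhanced for loop, which works on any Iterable. A small self-contained sketch combining both; the element name and output format are illustrative, not the dictionary's actual XML:

    import java.io.IOException;
    import java.util.Arrays;
    import java.util.List;

    public class SerializerIdiomsExample {

      static void serialize(List<String> tokens) throws IOException {
        try {
          // Enhanced for loop instead of an explicit Iterator.
          for (String token : tokens) {
            if (token.isEmpty()) {
              throw new IllegalStateException("empty token");
            }
            System.out.println("<token>" + token + "</token>");
          }
        } catch (IllegalStateException e) {
          // IOException(String, Throwable) chains the cause directly; no need
          // for "throw (IOException) new IOException(msg).initCause(e);".
          throw new IOException("Error during serialization: " + e.getMessage(), e);
        }
      }

      public static void main(String[] args) throws IOException {
        serialize(Arrays.asList("the", "quick", "fox"));
      }
    }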

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/doccat/BagOfWordsFeatureGenerator.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/doccat/BagOfWordsFeatureGenerator.java b/opennlp-tools/src/main/java/opennlp/tools/doccat/BagOfWordsFeatureGenerator.java
index 848f4e1..ac39afc 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/doccat/BagOfWordsFeatureGenerator.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/doccat/BagOfWordsFeatureGenerator.java
@@ -41,7 +41,7 @@ public class BagOfWordsFeatureGenerator implements FeatureGenerator {
   @Override
   public Collection<String> extractFeatures(String[] text, Map<String, Object> extraInformation) {
 
-    Collection<String> bagOfWords = new ArrayList<String>(text.length);
+    Collection<String> bagOfWords = new ArrayList<>(text.length);
 
     for (String word : text) {
       if (useOnlyAllLetterTokens) {

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/doccat/DoccatCrossValidator.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/doccat/DoccatCrossValidator.java b/opennlp-tools/src/main/java/opennlp/tools/doccat/DoccatCrossValidator.java
index dbce077..106b82d 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/doccat/DoccatCrossValidator.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/doccat/DoccatCrossValidator.java
@@ -65,7 +65,7 @@ public class DoccatCrossValidator {
   public void evaluate(ObjectStream<DocumentSample> samples, int nFolds)
       throws IOException {
 
-    CrossValidationPartitioner<DocumentSample> partitioner = new CrossValidationPartitioner<DocumentSample>(
+    CrossValidationPartitioner<DocumentSample> partitioner = new CrossValidationPartitioner<>(
         samples, nFolds);
 
     while (partitioner.hasNext()) {

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/doccat/DoccatFactory.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/doccat/DoccatFactory.java b/opennlp-tools/src/main/java/opennlp/tools/doccat/DoccatFactory.java
index 9b30d95..b95671a 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/doccat/DoccatFactory.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/doccat/DoccatFactory.java
@@ -140,8 +140,7 @@ public class DoccatFactory extends BaseToolFactory {
       }
       if (featureGenerators == null) { // could not load using artifact provider
         // load bag of words as default
-        FeatureGenerator[] bow = {new BagOfWordsFeatureGenerator()};
-        this.featureGenerators = bow;
+        this.featureGenerators = new FeatureGenerator[]{new BagOfWordsFeatureGenerator()};
       }
     }
     return featureGenerators;

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/doccat/DoccatModel.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/doccat/DoccatModel.java b/opennlp-tools/src/main/java/opennlp/tools/doccat/DoccatModel.java
index 2bbaace..e71b625 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/doccat/DoccatModel.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/doccat/DoccatModel.java
@@ -45,15 +45,15 @@ public class DoccatModel extends BaseModel {
     checkArtifactMap();
   }
 
-  public DoccatModel(InputStream in) throws IOException, InvalidFormatException {
+  public DoccatModel(InputStream in) throws IOException {
     super(COMPONENT_NAME, in);
   }
 
-  public DoccatModel(File modelFile) throws IOException, InvalidFormatException {
+  public DoccatModel(File modelFile) throws IOException {
     super(COMPONENT_NAME, modelFile);
   }
 
-  public DoccatModel(URL modelURL) throws IOException, InvalidFormatException {
+  public DoccatModel(URL modelURL) throws IOException {
     super(COMPONENT_NAME, modelURL);
   }
 

http://git-wip-us.apache.org/repos/asf/opennlp/blob/afc6b65b/opennlp-tools/src/main/java/opennlp/tools/doccat/DocumentCategorizerContextGenerator.java
----------------------------------------------------------------------
diff --git a/opennlp-tools/src/main/java/opennlp/tools/doccat/DocumentCategorizerContextGenerator.java b/opennlp-tools/src/main/java/opennlp/tools/doccat/DocumentCategorizerContextGenerator.java
index b62d8eb..737b9d1 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/doccat/DocumentCategorizerContextGenerator.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/doccat/DocumentCategorizerContextGenerator.java
@@ -33,7 +33,7 @@ class DocumentCategorizerContextGenerator {
 
   public String[] getContext(String text[], Map<String, Object> extraInformation) {
 
-    Collection<String> context = new LinkedList<String>();
+    Collection<String> context = new LinkedList<>();
 
     for (FeatureGenerator mFeatureGenerator : mFeatureGenerators) {
       Collection<String> extractedFeatures =