You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@mahout.apache.org by sm...@apache.org on 2015/03/25 07:49:24 UTC

[3/3] mahout git commit: MAHOUT-1652: Java 7 update

MAHOUT-1652: Java 7 update


Project: http://git-wip-us.apache.org/repos/asf/mahout/repo
Commit: http://git-wip-us.apache.org/repos/asf/mahout/commit/87c15bea
Tree: http://git-wip-us.apache.org/repos/asf/mahout/tree/87c15bea
Diff: http://git-wip-us.apache.org/repos/asf/mahout/diff/87c15bea

Branch: refs/heads/master
Commit: 87c15bea46081baab98db9a0dbd480c5dac5ceb4
Parents: ec8a5a0
Author: Suneel Marthi <su...@gmail.com>
Authored: Wed Mar 25 02:43:44 2015 -0400
Committer: Suneel Marthi <su...@gmail.com>
Committed: Wed Mar 25 02:43:44 2015 -0400

----------------------------------------------------------------------
 .../taste/example/email/MailToPrefsDriver.java  |  4 +-
 .../cf/taste/example/email/MailToRecMapper.java |  4 +-
 .../taste/example/kddcup/DataFileIterator.java  |  2 +-
 .../taste/example/kddcup/KDDCupDataModel.java   |  8 +--
 .../track1/svd/ParallelArraysSGDFactorizer.java |  4 +-
 .../example/kddcup/track2/Track2Callable.java   |  2 +-
 .../kddcup/track2/TrackItemSimilarity.java      |  2 +-
 .../classifier/df/mapreduce/TestForest.java     |  2 +-
 .../sequencelearning/hmm/PosTagger.java         |  3 +-
 .../tools/ClusterQualitySummarizer.java         | 12 ++--
 .../model/cassandra/CassandraDataModel.java     | 20 +++---
 .../impl/model/jdbc/AbstractJDBCDataModel.java  |  6 +-
 .../mahout/cf/taste/web/RecommenderServlet.java |  4 +-
 .../mahout/clustering/cdbw/CDbwEvaluator.java   | 12 ++--
 .../clustering/conversion/InputMapper.java      | 10 +--
 .../clustering/evaluation/ClusterEvaluator.java |  2 +-
 .../mahout/clustering/lda/LDAPrintTopics.java   |  6 +-
 .../text/MultipleTextFileInputFormat.java       |  4 +-
 .../mahout/text/PrefixAdditionFilter.java       |  2 +-
 .../text/ReadOnlyFileSystemDirectory.java       |  4 +-
 .../mahout/text/SequenceFilesFromDirectory.java |  4 +-
 .../SequenceFilesFromLuceneStorageMRJob.java    |  6 +-
 .../text/SequenceFilesFromMailArchives.java     |  4 +-
 .../mahout/text/WholeFileRecordReader.java      |  6 +-
 .../mahout/text/WikipediaToSequenceFile.java    | 10 +--
 .../org/apache/mahout/utils/MatrixDumper.java   |  2 +-
 .../apache/mahout/utils/SequenceFileDumper.java | 16 ++---
 .../org/apache/mahout/utils/SplitInput.java     | 12 ++--
 .../org/apache/mahout/utils/SplitInputJob.java  |  2 +-
 .../utils/clustering/AbstractClusterWriter.java |  2 +-
 .../mahout/utils/vectors/VectorDumper.java      |  5 +-
 .../mahout/utils/vectors/VectorHelper.java      |  9 +--
 .../utils/vectors/lucene/ClusterLabels.java     |  4 +-
 .../text/LuceneSegmentRecordReaderTest.java     | 25 ++++---
 .../text/TestSequenceFilesFromDirectory.java    |  9 +--
 .../mahout/utils/TestConcatenateVectorsJob.java |  2 +-
 .../mahout/cf/taste/common/Refreshable.java     |  2 +-
 .../apache/mahout/cf/taste/hadoop/als/ALS.java  |  5 +-
 .../hadoop/als/ParallelALSFactorizationJob.java | 14 ++--
 .../cf/taste/hadoop/als/PredictionMapper.java   |  2 +-
 .../item/AggregateAndRecommendReducer.java      |  4 +-
 .../mahout/cf/taste/hadoop/item/IDReader.java   | 13 ++--
 .../hadoop/item/UserVectorSplitterMapper.java   |  3 +
 .../AbstractDifferenceRecommenderEvaluator.java |  4 +-
 .../GenericRecommenderIRStatsEvaluator.java     |  2 +-
 .../impl/model/GenericBooleanPrefDataModel.java |  4 +-
 .../cf/taste/impl/model/GenericDataModel.java   |  4 +-
 .../cf/taste/impl/model/MemoryIDMigrator.java   |  2 +-
 .../PlusAnonymousConcurrentUserDataModel.java   |  6 +-
 .../cf/taste/impl/model/file/FileDataModel.java | 12 ++--
 .../taste/impl/model/file/FileIDMigrator.java   |  4 +-
 .../neighborhood/CachingUserNeighborhood.java   |  2 +-
 .../impl/recommender/CachingRecommender.java    |  4 +-
 .../recommender/ItemAverageRecommender.java     |  2 +-
 .../recommender/ItemUserAverageRecommender.java |  4 +-
 .../cf/taste/impl/recommender/NullRescorer.java |  4 +-
 .../SamplingCandidateItemsStrategy.java         |  2 +-
 .../cf/taste/impl/recommender/TopItems.java     |  8 +--
 .../similarity/AveragingPreferenceInferrer.java |  2 +-
 .../impl/similarity/CachingItemSimilarity.java  |  2 +-
 .../impl/similarity/CachingUserSimilarity.java  |  2 +-
 .../impl/similarity/GenericItemSimilarity.java  |  6 +-
 .../impl/similarity/GenericUserSimilarity.java  |  4 +-
 .../MultithreadedBatchItemSimilarities.java     |  4 +-
 .../mahout/cf/taste/model/IDMigrator.java       |  2 -
 .../classifier/naivebayes/BayesUtils.java       |  4 +-
 .../naivebayes/test/BayesTestMapper.java        |  2 +-
 .../naivebayes/test/TestNaiveBayesDriver.java   |  6 +-
 .../sequencelearning/hmm/BaumWelchTrainer.java  |  5 +-
 .../sequencelearning/hmm/ViterbiEvaluator.java  |  5 +-
 .../mahout/clustering/AbstractCluster.java      |  2 +-
 .../WeightedPropertyVectorWritable.java         |  2 +-
 .../clustering/fuzzykmeans/FuzzyKMeansUtil.java |  2 +-
 .../clustering/iterator/ClusterIterator.java    |  2 +-
 .../mahout/clustering/kmeans/KMeansUtil.java    |  2 +-
 .../clustering/kmeans/RandomSeedGenerator.java  |  4 +-
 .../cvb/InMemoryCollapsedVariationalBayes0.java |  2 +-
 .../mahout/clustering/lda/cvb/ModelTrainer.java |  2 +-
 .../mahout/clustering/lda/cvb/TopicModel.java   |  2 +-
 .../mahout/clustering/spectral/VectorCache.java |  2 +-
 .../spectral/kmeans/EigenSeedGenerator.java     |  4 +-
 .../streaming/cluster/BallKMeans.java           |  4 +-
 .../postprocessor/ClusterCountReader.java       |  4 +-
 .../ClusterOutputPostProcessorDriver.java       |  2 +-
 .../org/apache/mahout/common/ClassUtils.java    | 16 +----
 .../org/apache/mahout/common/HadoopUtil.java    |  6 +-
 .../java/org/apache/mahout/common/IOUtils.java  |  6 +-
 .../java/org/apache/mahout/common/Pair.java     |  4 +-
 .../org/apache/mahout/common/Parameters.java    |  4 +-
 .../iterator/CopyConstructorIterator.java       |  6 +-
 .../StableFixedSizeSamplingIterator.java        |  4 +-
 .../common/iterator/StringRecordIterator.java   |  2 +-
 .../sequencefile/SequenceFileDirIterable.java   |  2 +-
 .../sequencefile/SequenceFileDirIterator.java   |  2 +-
 .../SequenceFileDirValueIterable.java           |  2 +-
 .../SequenceFileDirValueIterator.java           |  2 +-
 .../sequencefile/SequenceFileIterable.java      |  2 +-
 .../sequencefile/SequenceFileIterator.java      |  2 +-
 .../sequencefile/SequenceFileValueIterable.java |  2 +-
 .../sequencefile/SequenceFileValueIterator.java |  2 +-
 .../org/apache/mahout/driver/MahoutDriver.java  |  2 -
 .../main/java/org/apache/mahout/ep/State.java   |  2 +-
 .../org/apache/mahout/math/MatrixUtils.java     |  2 +-
 .../math/hadoop/DistributedRowMatrix.java       |  2 +-
 .../math/hadoop/MatrixColumnMeansJob.java       |  2 +-
 .../mahout/math/hadoop/TimesSquaredJob.java     |  4 +-
 .../math/hadoop/similarity/SeedVectorUtil.java  |  2 +-
 .../hadoop/similarity/cooccurrence/Vectors.java |  2 +-
 .../DistributedConjugateGradientSolver.java     | 14 ++--
 .../mahout/math/hadoop/stats/BasicStats.java    |  2 +-
 .../hadoop/stochasticsvd/ABtDenseOutJob.java    | 14 +---
 .../math/hadoop/stochasticsvd/ABtJob.java       | 12 +---
 .../mahout/math/hadoop/stochasticsvd/BtJob.java | 14 ++--
 .../math/hadoop/stochasticsvd/SSVDHelper.java   | 74 ++++++++++----------
 .../math/hadoop/stochasticsvd/SSVDSolver.java   |  1 -
 .../mahout/math/hadoop/stochasticsvd/UJob.java  |  1 -
 .../hadoop/stochasticsvd/qr/QRFirstStep.java    |  3 +-
 .../hadoop/stochasticsvd/qr/QRLastStep.java     |  4 +-
 .../mahout/math/neighborhood/BruteSearch.java   |  8 +--
 .../math/neighborhood/FastProjectionSearch.java |  8 +--
 .../LocalitySensitiveHashSearch.java            |  4 +-
 .../math/neighborhood/ProjectionSearch.java     | 12 ++--
 .../mahout/math/random/RandomProjector.java     |  2 +-
 .../math/ssvd/SequentialOutOfCoreSvd.java       | 44 +++---------
 .../mahout/vectorizer/DictionaryVectorizer.java |  2 +-
 .../collocations/llr/CollocMapper.java          | 12 ++--
 .../encoders/CachingContinuousValueEncoder.java |  6 +-
 .../encoders/CachingStaticWordValueEncoder.java |  6 +-
 .../vectorizer/term/TFPartialVectorReducer.java |  2 +-
 .../mahout/vectorizer/term/TermCountMapper.java |  2 +-
 .../mahout/vectorizer/tfidf/TFIDFConverter.java |  2 +-
 pom.xml                                         |  8 +--
 132 files changed, 324 insertions(+), 433 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/examples/src/main/java/org/apache/mahout/cf/taste/example/email/MailToPrefsDriver.java
----------------------------------------------------------------------
diff --git a/examples/src/main/java/org/apache/mahout/cf/taste/example/email/MailToPrefsDriver.java b/examples/src/main/java/org/apache/mahout/cf/taste/example/email/MailToPrefsDriver.java
index 09bd817..1d150ae 100644
--- a/examples/src/main/java/org/apache/mahout/cf/taste/example/email/MailToPrefsDriver.java
+++ b/examples/src/main/java/org/apache/mahout/cf/taste/example/email/MailToPrefsDriver.java
@@ -55,7 +55,7 @@ import org.slf4j.LoggerFactory;
 
 /**
  * Convert the Mail archives (see {@link org.apache.mahout.text.SequenceFilesFromMailArchives}) to a preference
- * file that can be consumed by the {@link org.apache.mahout.cf.taste.hadoop.pseudo.RecommenderJob}.
+ * file that can be consumed by the {@link org.apache.mahout.cf.taste.hadoop.item.RecommenderJob}.
  * <p/>
  * This assumes the input is a Sequence File, that the key is: filename/message id and the value is a list
  * (separated by the user's choosing) containing the from email and any references
@@ -246,7 +246,7 @@ public final class MailToPrefsDriver extends AbstractJob {
       Path filesPattern = new Path(inputPath, OUTPUT_FILES_PATTERN);
       int i = 1; //start at 1, since a miss in the OpenObjectIntHashMap returns a 0
       for (Pair<Writable, Writable> record
-              : new SequenceFileDirIterable<Writable, Writable>(filesPattern, PathType.GLOB, null, null, true, conf)) {
+              : new SequenceFileDirIterable<>(filesPattern, PathType.GLOB, null, null, true, conf)) {
         if (currentChunkSize > chunkSizeLimit) {
           Closeables.close(dictWriter, false);
           chunkIndex++;

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/examples/src/main/java/org/apache/mahout/cf/taste/example/email/MailToRecMapper.java
----------------------------------------------------------------------
diff --git a/examples/src/main/java/org/apache/mahout/cf/taste/example/email/MailToRecMapper.java b/examples/src/main/java/org/apache/mahout/cf/taste/example/email/MailToRecMapper.java
index 2de08cc..91bbd17 100644
--- a/examples/src/main/java/org/apache/mahout/cf/taste/example/email/MailToRecMapper.java
+++ b/examples/src/main/java/org/apache/mahout/cf/taste/example/email/MailToRecMapper.java
@@ -32,8 +32,8 @@ public final class MailToRecMapper extends Mapper<Text, Text, Text, LongWritable
 
   private static final Logger log = LoggerFactory.getLogger(MailToRecMapper.class);
 
-  private final OpenObjectIntHashMap<String> fromDictionary = new OpenObjectIntHashMap<String>();
-  private final OpenObjectIntHashMap<String> msgIdDictionary = new OpenObjectIntHashMap<String>();
+  private final OpenObjectIntHashMap<String> fromDictionary = new OpenObjectIntHashMap<>();
+  private final OpenObjectIntHashMap<String> msgIdDictionary = new OpenObjectIntHashMap<>();
   private String separator = "\n";
   private int fromIdx;
   private int refsIdx;

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/examples/src/main/java/org/apache/mahout/cf/taste/example/kddcup/DataFileIterator.java
----------------------------------------------------------------------
diff --git a/examples/src/main/java/org/apache/mahout/cf/taste/example/kddcup/DataFileIterator.java b/examples/src/main/java/org/apache/mahout/cf/taste/example/kddcup/DataFileIterator.java
index d0c9f68..786e080 100644
--- a/examples/src/main/java/org/apache/mahout/cf/taste/example/kddcup/DataFileIterator.java
+++ b/examples/src/main/java/org/apache/mahout/cf/taste/example/kddcup/DataFileIterator.java
@@ -112,7 +112,7 @@ public final class DataFileIterator
       ratingsLeftToRead--;
     }
 
-    return new Pair<PreferenceArray,long[]>(currentUserPrefs, timestamps);
+    return new Pair<>(currentUserPrefs, timestamps);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/examples/src/main/java/org/apache/mahout/cf/taste/example/kddcup/KDDCupDataModel.java
----------------------------------------------------------------------
diff --git a/examples/src/main/java/org/apache/mahout/cf/taste/example/kddcup/KDDCupDataModel.java b/examples/src/main/java/org/apache/mahout/cf/taste/example/kddcup/KDDCupDataModel.java
index 1c26027..4b62050 100644
--- a/examples/src/main/java/org/apache/mahout/cf/taste/example/kddcup/KDDCupDataModel.java
+++ b/examples/src/main/java/org/apache/mahout/cf/taste/example/kddcup/KDDCupDataModel.java
@@ -72,11 +72,11 @@ public final class KDDCupDataModel implements DataModel {
 
     Iterator<Pair<PreferenceArray,long[]>> dataIterator = new DataFileIterator(dataFile);
     if (samplingRate < 1.0) {
-      dataIterator = new SamplingIterator<Pair<PreferenceArray,long[]>>(dataIterator, samplingRate);
+      dataIterator = new SamplingIterator<>(dataIterator, samplingRate);
     }
 
-    FastByIDMap<PreferenceArray> userData = new FastByIDMap<PreferenceArray>();
-    FastByIDMap<FastByIDMap<Long>> timestamps = new FastByIDMap<FastByIDMap<Long>>();
+    FastByIDMap<PreferenceArray> userData = new FastByIDMap<>();
+    FastByIDMap<FastByIDMap<Long>> timestamps = new FastByIDMap<>();
 
     while (dataIterator.hasNext()) {
 
@@ -86,7 +86,7 @@ public final class KDDCupDataModel implements DataModel {
 
       userData.put(userPrefs.getUserID(0), userPrefs);
       if (storeDates) {
-        FastByIDMap<Long> itemTimestamps = new FastByIDMap<Long>();
+        FastByIDMap<Long> itemTimestamps = new FastByIDMap<>();
         for (int i = 0; i < timestampsForPrefs.length; i++) {
           long timestamp = timestampsForPrefs[i];
           if (timestamp > 0L) {

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/examples/src/main/java/org/apache/mahout/cf/taste/example/kddcup/track1/svd/ParallelArraysSGDFactorizer.java
----------------------------------------------------------------------
diff --git a/examples/src/main/java/org/apache/mahout/cf/taste/example/kddcup/track1/svd/ParallelArraysSGDFactorizer.java b/examples/src/main/java/org/apache/mahout/cf/taste/example/kddcup/track1/svd/ParallelArraysSGDFactorizer.java
index 715ebc3..a99d54c 100644
--- a/examples/src/main/java/org/apache/mahout/cf/taste/example/kddcup/track1/svd/ParallelArraysSGDFactorizer.java
+++ b/examples/src/main/java/org/apache/mahout/cf/taste/example/kddcup/track1/svd/ParallelArraysSGDFactorizer.java
@@ -107,7 +107,7 @@ public class ParallelArraysSGDFactorizer implements Factorizer {
     int numPrefs = factorizablePreferences.numPreferences();
 
     log.info("Mapping {} users...", numUsers);
-    userIDMapping = new FastByIDMap<Integer>(numUsers);
+    userIDMapping = new FastByIDMap<>(numUsers);
     int index = 0;
     LongPrimitiveIterator userIterator = factorizablePreferences.getUserIDs();
     while (userIterator.hasNext()) {
@@ -115,7 +115,7 @@ public class ParallelArraysSGDFactorizer implements Factorizer {
     }
 
     log.info("Mapping {} items", numItems);
-    itemIDMapping = new FastByIDMap<Integer>(numItems);
+    itemIDMapping = new FastByIDMap<>(numItems);
     index = 0;
     LongPrimitiveIterator itemIterator = factorizablePreferences.getItemIDs();
     while (itemIterator.hasNext()) {

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/examples/src/main/java/org/apache/mahout/cf/taste/example/kddcup/track2/Track2Callable.java
----------------------------------------------------------------------
diff --git a/examples/src/main/java/org/apache/mahout/cf/taste/example/kddcup/track2/Track2Callable.java b/examples/src/main/java/org/apache/mahout/cf/taste/example/kddcup/track2/Track2Callable.java
index 2454017..7ecc5fe 100644
--- a/examples/src/main/java/org/apache/mahout/cf/taste/example/kddcup/track2/Track2Callable.java
+++ b/examples/src/main/java/org/apache/mahout/cf/taste/example/kddcup/track2/Track2Callable.java
@@ -54,7 +54,7 @@ final class Track2Callable implements Callable<UserResult> {
       throw new IllegalArgumentException("Expecting 6 items for user but got " + userTest);
     }
     long userID = userTest.get(0).getUserID();
-    TreeMap<Double,Long> estimateToItemID = new TreeMap<Double,Long>(Collections.reverseOrder());
+    TreeMap<Double,Long> estimateToItemID = new TreeMap<>(Collections.reverseOrder());
 
     for (int i = 0; i < testSize; i++) {
       long itemID = userTest.getItemID(i);

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/examples/src/main/java/org/apache/mahout/cf/taste/example/kddcup/track2/TrackItemSimilarity.java
----------------------------------------------------------------------
diff --git a/examples/src/main/java/org/apache/mahout/cf/taste/example/kddcup/track2/TrackItemSimilarity.java b/examples/src/main/java/org/apache/mahout/cf/taste/example/kddcup/track2/TrackItemSimilarity.java
index 54f2fa7..3012a84 100644
--- a/examples/src/main/java/org/apache/mahout/cf/taste/example/kddcup/track2/TrackItemSimilarity.java
+++ b/examples/src/main/java/org/apache/mahout/cf/taste/example/kddcup/track2/TrackItemSimilarity.java
@@ -34,7 +34,7 @@ final class TrackItemSimilarity implements ItemSimilarity {
   private final FastByIDMap<TrackData> trackData;
 
   TrackItemSimilarity(File dataFileDirectory) throws IOException {
-    trackData = new FastByIDMap<TrackData>();
+    trackData = new FastByIDMap<>();
     for (String line : new FileLineIterable(KDDCupDataModel.getTrackFile(dataFileDirectory))) {
       TrackData trackDatum = new TrackData(line);
       trackData.put(trackDatum.getTrackID(), trackDatum);

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/examples/src/main/java/org/apache/mahout/classifier/df/mapreduce/TestForest.java
----------------------------------------------------------------------
diff --git a/examples/src/main/java/org/apache/mahout/classifier/df/mapreduce/TestForest.java b/examples/src/main/java/org/apache/mahout/classifier/df/mapreduce/TestForest.java
index 411c68f..9ce2104 100644
--- a/examples/src/main/java/org/apache/mahout/classifier/df/mapreduce/TestForest.java
+++ b/examples/src/main/java/org/apache/mahout/classifier/df/mapreduce/TestForest.java
@@ -227,7 +227,7 @@ public class TestForest extends Configured implements Tool {
     Random rng = RandomUtils.getRandom();
 
     List<double[]> resList = Lists.newArrayList();
-    if (dataFS.getFileStatus(dataPath).isDir()) {
+    if (dataFS.getFileStatus(dataPath).isDirectory()) {
       //the input is a directory of files
       testDirectory(outputPath, converter, forest, dataset, resList, rng);
     }  else {

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/examples/src/main/java/org/apache/mahout/classifier/sequencelearning/hmm/PosTagger.java
----------------------------------------------------------------------
diff --git a/examples/src/main/java/org/apache/mahout/classifier/sequencelearning/hmm/PosTagger.java b/examples/src/main/java/org/apache/mahout/classifier/sequencelearning/hmm/PosTagger.java
index 4864021..4ba844d 100644
--- a/examples/src/main/java/org/apache/mahout/classifier/sequencelearning/hmm/PosTagger.java
+++ b/examples/src/main/java/org/apache/mahout/classifier/sequencelearning/hmm/PosTagger.java
@@ -27,7 +27,6 @@ import java.util.regex.Pattern;
 import com.google.common.base.Charsets;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import com.google.common.io.CharStreams;
 import com.google.common.io.Resources;
 import org.apache.mahout.math.Matrix;
 import org.slf4j.Logger;
@@ -122,7 +121,7 @@ public final class PosTagger {
     List<Integer> observedSequence = Lists.newLinkedList();
     List<Integer> hiddenSequence = Lists.newLinkedList();
 
-    for (String line : CharStreams.readLines(Resources.newReaderSupplier(new URL(url), Charsets.UTF_8))) {
+    for (String line :Resources.readLines(new URL(url), Charsets.UTF_8)) {
       if (line.isEmpty()) {
         // new sentence starts
         int[] observedSequenceArray = new int[observedSequence.size()];

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/examples/src/main/java/org/apache/mahout/clustering/streaming/tools/ClusterQualitySummarizer.java
----------------------------------------------------------------------
diff --git a/examples/src/main/java/org/apache/mahout/clustering/streaming/tools/ClusterQualitySummarizer.java b/examples/src/main/java/org/apache/mahout/clustering/streaming/tools/ClusterQualitySummarizer.java
index ffc07a4..038a097 100644
--- a/examples/src/main/java/org/apache/mahout/clustering/streaming/tools/ClusterQualitySummarizer.java
+++ b/examples/src/main/java/org/apache/mahout/clustering/streaming/tools/ClusterQualitySummarizer.java
@@ -110,23 +110,23 @@ public class ClusterQualitySummarizer extends AbstractJob {
       List<Centroid> centroidsCompare = null;
       if (mahoutKMeansFormat) {
         SequenceFileDirValueIterable<ClusterWritable> clusterIterable =
-            new SequenceFileDirValueIterable<ClusterWritable>(new Path(centroidFile), PathType.GLOB, conf);
+            new SequenceFileDirValueIterable<>(new Path(centroidFile), PathType.GLOB, conf);
         centroids = Lists.newArrayList(IOUtils.getCentroidsFromClusterWritableIterable(clusterIterable));
       } else {
         SequenceFileDirValueIterable<CentroidWritable> centroidIterable =
-            new SequenceFileDirValueIterable<CentroidWritable>(new Path(centroidFile), PathType.GLOB, conf);
+            new SequenceFileDirValueIterable<>(new Path(centroidFile), PathType.GLOB, conf);
         centroids = Lists.newArrayList(IOUtils.getCentroidsFromCentroidWritableIterable(centroidIterable));
       }
 
       if (centroidCompareFile != null) {
         if (mahoutKMeansFormatCompare) {
           SequenceFileDirValueIterable<ClusterWritable> clusterCompareIterable =
-              new SequenceFileDirValueIterable<ClusterWritable>(new Path(centroidCompareFile), PathType.GLOB, conf);
+              new SequenceFileDirValueIterable<>(new Path(centroidCompareFile), PathType.GLOB, conf);
           centroidsCompare = Lists.newArrayList(
               IOUtils.getCentroidsFromClusterWritableIterable(clusterCompareIterable));
         } else {
           SequenceFileDirValueIterable<CentroidWritable> centroidCompareIterable =
-              new SequenceFileDirValueIterable<CentroidWritable>(new Path(centroidCompareFile), PathType.GLOB, conf);
+              new SequenceFileDirValueIterable<>(new Path(centroidCompareFile), PathType.GLOB, conf);
           centroidsCompare = Lists.newArrayList(
               IOUtils.getCentroidsFromCentroidWritableIterable(centroidCompareIterable));
         }
@@ -134,7 +134,7 @@ public class ClusterQualitySummarizer extends AbstractJob {
 
       // Reading in the "training" set.
       SequenceFileDirValueIterable<VectorWritable> trainIterable =
-          new SequenceFileDirValueIterable<VectorWritable>(new Path(trainFile), PathType.GLOB, conf);
+          new SequenceFileDirValueIterable<>(new Path(trainFile), PathType.GLOB, conf);
       Iterable<Vector> trainDatapoints = IOUtils.getVectorsFromVectorWritableIterable(trainIterable);
       Iterable<Vector> datapoints = trainDatapoints;
 
@@ -144,7 +144,7 @@ public class ClusterQualitySummarizer extends AbstractJob {
       // Also adding in the "test" set.
       if (testFile != null) {
         SequenceFileDirValueIterable<VectorWritable> testIterable =
-            new SequenceFileDirValueIterable<VectorWritable>(new Path(testFile), PathType.GLOB, conf);
+            new SequenceFileDirValueIterable<>(new Path(testFile), PathType.GLOB, conf);
         Iterable<Vector> testDatapoints = IOUtils.getVectorsFromVectorWritableIterable(testIterable);
 
         printSummaries(ClusteringUtils.summarizeClusterDistances(testDatapoints, centroids,

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/integration/src/main/java/org/apache/mahout/cf/taste/impl/model/cassandra/CassandraDataModel.java
----------------------------------------------------------------------
diff --git a/integration/src/main/java/org/apache/mahout/cf/taste/impl/model/cassandra/CassandraDataModel.java b/integration/src/main/java/org/apache/mahout/cf/taste/impl/model/cassandra/CassandraDataModel.java
index 68f70c3..b220993 100644
--- a/integration/src/main/java/org/apache/mahout/cf/taste/impl/model/cassandra/CassandraDataModel.java
+++ b/integration/src/main/java/org/apache/mahout/cf/taste/impl/model/cassandra/CassandraDataModel.java
@@ -133,12 +133,12 @@ public final class CassandraDataModel implements DataModel, Closeable {
     keyspace = HFactory.createKeyspace(keyspaceName, cluster);
     keyspace.setConsistencyLevelPolicy(new OneConsistencyLevelPolicy());
 
-    userCache = new Cache<Long,PreferenceArray>(new UserPrefArrayRetriever(), 1 << 20);
-    itemCache = new Cache<Long,PreferenceArray>(new ItemPrefArrayRetriever(), 1 << 20);
-    itemIDsFromUserCache = new Cache<Long,FastIDSet>(new ItemIDsFromUserRetriever(), 1 << 20);
-    userIDsFromItemCache = new Cache<Long,FastIDSet>(new UserIDsFromItemRetriever(), 1 << 20);
-    userCountCache = new AtomicReference<Integer>(null);
-    itemCountCache = new AtomicReference<Integer>(null);
+    userCache = new Cache<>(new UserPrefArrayRetriever(), 1 << 20);
+    itemCache = new Cache<>(new ItemPrefArrayRetriever(), 1 << 20);
+    itemIDsFromUserCache = new Cache<>(new ItemIDsFromUserRetriever(), 1 << 20);
+    userIDsFromItemCache = new Cache<>(new UserIDsFromItemRetriever(), 1 << 20);
+    userCountCache = new AtomicReference<>(null);
+    itemCountCache = new AtomicReference<>(null);
   }
 
   @Override
@@ -262,25 +262,25 @@ public final class CassandraDataModel implements DataModel, Closeable {
 
     Mutator<Long> mutator = HFactory.createMutator(keyspace, LongSerializer.get());
 
-    HColumn<Long,Float> itemForUsers = new HColumnImpl<Long,Float>(LongSerializer.get(), FloatSerializer.get());
+    HColumn<Long,Float> itemForUsers = new HColumnImpl<>(LongSerializer.get(), FloatSerializer.get());
     itemForUsers.setName(itemID);
     itemForUsers.setClock(now);
     itemForUsers.setValue(value);
     mutator.addInsertion(userID, USERS_CF, itemForUsers);
 
-    HColumn<Long,Float> userForItems = new HColumnImpl<Long,Float>(LongSerializer.get(), FloatSerializer.get());
+    HColumn<Long,Float> userForItems = new HColumnImpl<>(LongSerializer.get(), FloatSerializer.get());
     userForItems.setName(userID);
     userForItems.setClock(now);
     userForItems.setValue(value);
     mutator.addInsertion(itemID, ITEMS_CF, userForItems);
 
-    HColumn<Long,byte[]> userIDs = new HColumnImpl<Long,byte[]>(LongSerializer.get(), BytesArraySerializer.get());
+    HColumn<Long,byte[]> userIDs = new HColumnImpl<>(LongSerializer.get(), BytesArraySerializer.get());
     userIDs.setName(userID);
     userIDs.setClock(now);
     userIDs.setValue(EMPTY);
     mutator.addInsertion(ID_ROW_KEY, USER_IDS_CF, userIDs);
 
-    HColumn<Long,byte[]> itemIDs = new HColumnImpl<Long,byte[]>(LongSerializer.get(), BytesArraySerializer.get());
+    HColumn<Long,byte[]> itemIDs = new HColumnImpl<>(LongSerializer.get(), BytesArraySerializer.get());
     itemIDs.setName(itemID);
     itemIDs.setClock(now);
     itemIDs.setValue(EMPTY);

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/integration/src/main/java/org/apache/mahout/cf/taste/impl/model/jdbc/AbstractJDBCDataModel.java
----------------------------------------------------------------------
diff --git a/integration/src/main/java/org/apache/mahout/cf/taste/impl/model/jdbc/AbstractJDBCDataModel.java b/integration/src/main/java/org/apache/mahout/cf/taste/impl/model/jdbc/AbstractJDBCDataModel.java
index e3d4c3f..d9317c0 100644
--- a/integration/src/main/java/org/apache/mahout/cf/taste/impl/model/jdbc/AbstractJDBCDataModel.java
+++ b/integration/src/main/java/org/apache/mahout/cf/taste/impl/model/jdbc/AbstractJDBCDataModel.java
@@ -218,7 +218,7 @@ public abstract class AbstractJDBCDataModel extends AbstractJDBCComponent implem
 
     this.cachedNumUsers = -1;
     this.cachedNumItems = -1;
-    this.itemPrefCounts = new Cache<Long,Integer>(new ItemPrefCountRetriever(getNumPreferenceForItemSQL));
+    this.itemPrefCounts = new Cache<>(new ItemPrefCountRetriever(getNumPreferenceForItemSQL));
 
     this.maxPreference = Float.NaN;
     this.minPreference = Float.NaN;
@@ -311,7 +311,7 @@ public abstract class AbstractJDBCDataModel extends AbstractJDBCComponent implem
     Statement stmt = null;
     ResultSet rs = null;
 
-    FastByIDMap<PreferenceArray> result = new FastByIDMap<PreferenceArray>();
+    FastByIDMap<PreferenceArray> result = new FastByIDMap<>();
 
     try {
       conn = dataSource.getConnection();
@@ -356,7 +356,7 @@ public abstract class AbstractJDBCDataModel extends AbstractJDBCComponent implem
     Statement stmt = null;
     ResultSet rs = null;
 
-    FastByIDMap<FastIDSet> result = new FastByIDMap<FastIDSet>();
+    FastByIDMap<FastIDSet> result = new FastByIDMap<>();
 
     try {
       conn = dataSource.getConnection();

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/integration/src/main/java/org/apache/mahout/cf/taste/web/RecommenderServlet.java
----------------------------------------------------------------------
diff --git a/integration/src/main/java/org/apache/mahout/cf/taste/web/RecommenderServlet.java b/integration/src/main/java/org/apache/mahout/cf/taste/web/RecommenderServlet.java
index 45c1ef1..a5a89c6 100644
--- a/integration/src/main/java/org/apache/mahout/cf/taste/web/RecommenderServlet.java
+++ b/integration/src/main/java/org/apache/mahout/cf/taste/web/RecommenderServlet.java
@@ -102,10 +102,8 @@ public final class RecommenderServlet extends HttpServlet {
       } else {
         throw new ServletException("Bad format parameter: " + format);
       }
-    } catch (TasteException te) {
+    } catch (TasteException | IOException te) {
       throw new ServletException(te);
-    } catch (IOException ioe) {
-      throw new ServletException(ioe);
     }
 
   }

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/integration/src/main/java/org/apache/mahout/clustering/cdbw/CDbwEvaluator.java
----------------------------------------------------------------------
diff --git a/integration/src/main/java/org/apache/mahout/clustering/cdbw/CDbwEvaluator.java b/integration/src/main/java/org/apache/mahout/clustering/cdbw/CDbwEvaluator.java
index 7a4c211..972dec1 100644
--- a/integration/src/main/java/org/apache/mahout/clustering/cdbw/CDbwEvaluator.java
+++ b/integration/src/main/java/org/apache/mahout/clustering/cdbw/CDbwEvaluator.java
@@ -211,11 +211,11 @@ public final class CDbwEvaluator {
     if (interClusterDensities != null) {
       return interClusterDensities;
     }
-    interClusterDensities = new TreeMap<Integer,Map<Integer,Double>>();
+    interClusterDensities = new TreeMap<>();
     // find the closest representative points between the clusters
     for (int i = 0; i < clusters.size(); i++) {
       int cI = clusters.get(i).getId();
-      Map<Integer,Double> map = new TreeMap<Integer,Double>();
+      Map<Integer,Double> map = new TreeMap<>();
       interClusterDensities.put(cI, map);
       for (int j = i + 1; j < clusters.size(); j++) {
         int cJ = clusters.get(j).getId();
@@ -324,12 +324,12 @@ public final class CDbwEvaluator {
     if (minimumDistances != null) {
       return minimumDistances;
     }
-    minimumDistances = new TreeMap<Integer,Map<Integer,Double>>();
-    closestRepPointIndices = new TreeMap<Integer,Map<Integer,int[]>>();
+    minimumDistances = new TreeMap<>();
+    closestRepPointIndices = new TreeMap<>();
     for (int i = 0; i < clusters.size(); i++) {
       Integer cI = clusters.get(i).getId();
-      Map<Integer,Double> map = new TreeMap<Integer,Double>();
-      Map<Integer,int[]> treeMap = new TreeMap<Integer,int[]>();
+      Map<Integer,Double> map = new TreeMap<>();
+      Map<Integer,int[]> treeMap = new TreeMap<>();
       closestRepPointIndices.put(cI, treeMap);
       minimumDistances.put(cI, map);
       List<VectorWritable> closRepI = representativePoints.get(cI);

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/integration/src/main/java/org/apache/mahout/clustering/conversion/InputMapper.java
----------------------------------------------------------------------
diff --git a/integration/src/main/java/org/apache/mahout/clustering/conversion/InputMapper.java b/integration/src/main/java/org/apache/mahout/clustering/conversion/InputMapper.java
index ec8b3f5..9dbce5a 100644
--- a/integration/src/main/java/org/apache/mahout/clustering/conversion/InputMapper.java
+++ b/integration/src/main/java/org/apache/mahout/clustering/conversion/InputMapper.java
@@ -59,11 +59,7 @@ public class InputMapper extends Mapper<LongWritable, Text, Text, VectorWritable
         VectorWritable vectorWritable = new VectorWritable(result);
         context.write(new Text(String.valueOf(index)), vectorWritable);
 
-      } catch (InstantiationException e) {
-        throw new IllegalStateException(e);
-      } catch (IllegalAccessException e) {
-        throw new IllegalStateException(e);
-      } catch (InvocationTargetException e) {
+      } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
         throw new IllegalStateException(e);
       }
     }
@@ -77,9 +73,7 @@ public class InputMapper extends Mapper<LongWritable, Text, Text, VectorWritable
     try {
       Class<? extends Vector> outputClass = conf.getClassByName(vectorImplClassName).asSubclass(Vector.class);
       constructor = outputClass.getConstructor(int.class);
-    } catch (NoSuchMethodException e) {
-      throw new IllegalStateException(e);
-    } catch (ClassNotFoundException e) {
+    } catch (NoSuchMethodException | ClassNotFoundException e) {
       throw new IllegalStateException(e);
     }
   }

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/integration/src/main/java/org/apache/mahout/clustering/evaluation/ClusterEvaluator.java
----------------------------------------------------------------------
diff --git a/integration/src/main/java/org/apache/mahout/clustering/evaluation/ClusterEvaluator.java b/integration/src/main/java/org/apache/mahout/clustering/evaluation/ClusterEvaluator.java
index 30f1a97..3cd06eb 100644
--- a/integration/src/main/java/org/apache/mahout/clustering/evaluation/ClusterEvaluator.java
+++ b/integration/src/main/java/org/apache/mahout/clustering/evaluation/ClusterEvaluator.java
@@ -129,7 +129,7 @@ public class ClusterEvaluator {
    * @return a Map<Integer, Vector>
    */
   public Map<Integer,Vector> interClusterDistances() {
-    Map<Integer,Vector> distances = new TreeMap<Integer,Vector>();
+    Map<Integer,Vector> distances = new TreeMap<>();
     for (int i = 0; i < clusters.size(); i++) {
       Cluster clusterI = clusters.get(i);
       RandomAccessSparseVector row = new RandomAccessSparseVector(Integer.MAX_VALUE);

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/integration/src/main/java/org/apache/mahout/clustering/lda/LDAPrintTopics.java
----------------------------------------------------------------------
diff --git a/integration/src/main/java/org/apache/mahout/clustering/lda/LDAPrintTopics.java b/integration/src/main/java/org/apache/mahout/clustering/lda/LDAPrintTopics.java
index 32d2421..cb8d935 100644
--- a/integration/src/main/java/org/apache/mahout/clustering/lda/LDAPrintTopics.java
+++ b/integration/src/main/java/org/apache/mahout/clustering/lda/LDAPrintTopics.java
@@ -147,7 +147,7 @@ public final class LDAPrintTopics {
       q.poll();
     }
     if (q.size() < numWordsToPrint) {
-      q.add(new Pair<String,Double>(word, score));
+      q.add(new Pair<>(word, score));
     }
   }
   
@@ -218,10 +218,10 @@ public final class LDAPrintTopics {
     }
     for (int i = 0; i < queues.size(); i++) {
       Queue<Pair<String,Double>> queue = queues.get(i);
-      Queue<Pair<String,Double>> newQueue = new PriorityQueue<Pair<String, Double>>(queue.size());
+      Queue<Pair<String,Double>> newQueue = new PriorityQueue<>(queue.size());
       double norm = expSums.get(i);
       for (Pair<String,Double> pair : queue) {
-        newQueue.add(new Pair<String,Double>(pair.getFirst(), Math.exp(pair.getSecond()) / norm));
+        newQueue.add(new Pair<>(pair.getFirst(), Math.exp(pair.getSecond()) / norm));
       }
       queues.set(i, newQueue);
     }

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/integration/src/main/java/org/apache/mahout/text/MultipleTextFileInputFormat.java
----------------------------------------------------------------------
diff --git a/integration/src/main/java/org/apache/mahout/text/MultipleTextFileInputFormat.java b/integration/src/main/java/org/apache/mahout/text/MultipleTextFileInputFormat.java
index 32aa4a9..44df006 100644
--- a/integration/src/main/java/org/apache/mahout/text/MultipleTextFileInputFormat.java
+++ b/integration/src/main/java/org/apache/mahout/text/MultipleTextFileInputFormat.java
@@ -40,7 +40,7 @@ public class MultipleTextFileInputFormat extends CombineFileInputFormat<IntWrita
   public RecordReader<IntWritable, BytesWritable> createRecordReader(InputSplit inputSplit,
                                                                       TaskAttemptContext taskAttemptContext)
       throws IOException {
-    return new CombineFileRecordReader<IntWritable, BytesWritable>((CombineFileSplit) inputSplit,
-      taskAttemptContext, WholeFileRecordReader.class);
+    return new CombineFileRecordReader<>((CombineFileSplit) inputSplit,
+        taskAttemptContext, WholeFileRecordReader.class);
   }
 }

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/integration/src/main/java/org/apache/mahout/text/PrefixAdditionFilter.java
----------------------------------------------------------------------
diff --git a/integration/src/main/java/org/apache/mahout/text/PrefixAdditionFilter.java b/integration/src/main/java/org/apache/mahout/text/PrefixAdditionFilter.java
index a13341b..9c0bc11 100644
--- a/integration/src/main/java/org/apache/mahout/text/PrefixAdditionFilter.java
+++ b/integration/src/main/java/org/apache/mahout/text/PrefixAdditionFilter.java
@@ -48,7 +48,7 @@ public final class PrefixAdditionFilter extends SequenceFilesFromDirectoryFilter
   protected void process(FileStatus fst, Path current) throws IOException {
     FileSystem fs = getFs();
     ChunkedWriter writer = getWriter();
-    if (fst.isDir()) {
+    if (fst.isDirectory()) {
       String dirPath = getPrefix() + Path.SEPARATOR + current.getName() + Path.SEPARATOR + fst.getPath().getName();
       fs.listStatus(fst.getPath(),
                     new PrefixAdditionFilter(getConf(), dirPath, getOptions(), writer, getCharset(), fs));

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/integration/src/main/java/org/apache/mahout/text/ReadOnlyFileSystemDirectory.java
----------------------------------------------------------------------
diff --git a/integration/src/main/java/org/apache/mahout/text/ReadOnlyFileSystemDirectory.java b/integration/src/main/java/org/apache/mahout/text/ReadOnlyFileSystemDirectory.java
index e97e35b..18c1252 100644
--- a/integration/src/main/java/org/apache/mahout/text/ReadOnlyFileSystemDirectory.java
+++ b/integration/src/main/java/org/apache/mahout/text/ReadOnlyFileSystemDirectory.java
@@ -79,7 +79,7 @@ public class ReadOnlyFileSystemDirectory extends BaseDirectory {
     try {
       FileStatus status = fs.getFileStatus(directory);
       if (status != null) {
-        isDir = status.isDir();
+        isDir = status.isDirectory();
       }
     } catch (IOException e) {
       log.error(e.getMessage(), e);
@@ -99,7 +99,7 @@ public class ReadOnlyFileSystemDirectory extends BaseDirectory {
     try {
       FileStatus status = fs.getFileStatus(directory);
       if (status != null) {
-        isDir = status.isDir();
+        isDir = status.isDirectory();
       }
     } catch (IOException e) {
       log.error(e.getMessage(), e);

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/integration/src/main/java/org/apache/mahout/text/SequenceFilesFromDirectory.java
----------------------------------------------------------------------
diff --git a/integration/src/main/java/org/apache/mahout/text/SequenceFilesFromDirectory.java b/integration/src/main/java/org/apache/mahout/text/SequenceFilesFromDirectory.java
index 9d08984..720078c 100644
--- a/integration/src/main/java/org/apache/mahout/text/SequenceFilesFromDirectory.java
+++ b/integration/src/main/java/org/apache/mahout/text/SequenceFilesFromDirectory.java
@@ -142,9 +142,7 @@ public class SequenceFilesFromDirectory extends AbstractJob {
     if (!StringUtils.isBlank(fileFilterClassName) && !PrefixAdditionFilter.class.getName().equals(fileFilterClassName)) {
       try {
         pathFilter = (PathFilter) Class.forName(fileFilterClassName).newInstance();
-      } catch (InstantiationException e) {
-        throw new IllegalStateException(e);
-      } catch (IllegalAccessException e) {
+      } catch (InstantiationException | IllegalAccessException e) {
         throw new IllegalStateException(e);
       }
     }

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/integration/src/main/java/org/apache/mahout/text/SequenceFilesFromLuceneStorageMRJob.java
----------------------------------------------------------------------
diff --git a/integration/src/main/java/org/apache/mahout/text/SequenceFilesFromLuceneStorageMRJob.java b/integration/src/main/java/org/apache/mahout/text/SequenceFilesFromLuceneStorageMRJob.java
index 6c80aa0..5b45c48 100644
--- a/integration/src/main/java/org/apache/mahout/text/SequenceFilesFromLuceneStorageMRJob.java
+++ b/integration/src/main/java/org/apache/mahout/text/SequenceFilesFromLuceneStorageMRJob.java
@@ -58,11 +58,7 @@ public class SequenceFilesFromLuceneStorageMRJob {
       job.setNumReduceTasks(0);
 
       job.waitForCompletion(true);
-    } catch (IOException e) {
-      throw new RuntimeException(e);
-    } catch (InterruptedException e) {
-      throw new RuntimeException(e);
-    } catch (ClassNotFoundException e) {
+    } catch (IOException | InterruptedException | ClassNotFoundException e) {
       throw new RuntimeException(e);
     }
   }

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/integration/src/main/java/org/apache/mahout/text/SequenceFilesFromMailArchives.java
----------------------------------------------------------------------
diff --git a/integration/src/main/java/org/apache/mahout/text/SequenceFilesFromMailArchives.java b/integration/src/main/java/org/apache/mahout/text/SequenceFilesFromMailArchives.java
index 8b135bc..30c2a47 100644
--- a/integration/src/main/java/org/apache/mahout/text/SequenceFilesFromMailArchives.java
+++ b/integration/src/main/java/org/apache/mahout/text/SequenceFilesFromMailArchives.java
@@ -105,9 +105,9 @@ public final class SequenceFilesFromMailArchives extends AbstractJob {
     private static final Comparator<File> FILE_COMPARATOR = new CompositeFileComparator(
         DirectoryFileComparator.DIRECTORY_REVERSE, PathFileComparator.PATH_COMPARATOR);
 
-    private final Deque<MailProcessor> processors = new ArrayDeque<MailProcessor>();
+    private final Deque<MailProcessor> processors = new ArrayDeque<>();
     private final ChunkedWriter writer;
-    private final Deque<Long> messageCounts = new ArrayDeque<Long>();
+    private final Deque<Long> messageCounts = new ArrayDeque<>();
 
     public PrefixAdditionDirectoryWalker(MailProcessor processor, ChunkedWriter writer) {
       processors.addFirst(processor);

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/integration/src/main/java/org/apache/mahout/text/WholeFileRecordReader.java
----------------------------------------------------------------------
diff --git a/integration/src/main/java/org/apache/mahout/text/WholeFileRecordReader.java b/integration/src/main/java/org/apache/mahout/text/WholeFileRecordReader.java
index 7489842..fdb3654 100644
--- a/integration/src/main/java/org/apache/mahout/text/WholeFileRecordReader.java
+++ b/integration/src/main/java/org/apache/mahout/text/WholeFileRecordReader.java
@@ -82,11 +82,7 @@ public class WholeFileRecordReader extends RecordReader<IntWritable, BytesWritab
     if (!StringUtils.isBlank(fileFilterClassName) && !PrefixAdditionFilter.class.getName().equals(fileFilterClassName)) {
       try {
         pathFilter = (PathFilter) Class.forName(fileFilterClassName).newInstance();
-      } catch (ClassNotFoundException e) {
-        throw new IllegalStateException(e);
-      } catch (InstantiationException e) {
-        throw new IllegalStateException(e);
-      } catch (IllegalAccessException e) {
+      } catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) {
         throw new IllegalStateException(e);
       }
     }

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/integration/src/main/java/org/apache/mahout/text/WikipediaToSequenceFile.java
----------------------------------------------------------------------
diff --git a/integration/src/main/java/org/apache/mahout/text/WikipediaToSequenceFile.java b/integration/src/main/java/org/apache/mahout/text/WikipediaToSequenceFile.java
index 19f353c..1cde4cd 100644
--- a/integration/src/main/java/org/apache/mahout/text/WikipediaToSequenceFile.java
+++ b/integration/src/main/java/org/apache/mahout/text/WikipediaToSequenceFile.java
@@ -128,13 +128,7 @@ public final class WikipediaToSequenceFile {
       }
 
       runJob(inputPath, outputPath, catFile, cmdLine.hasOption(exactMatchOpt), all, removeLabels);
-    } catch (OptionException e) {
-      log.error("Exception", e);
-      CommandLineUtil.printHelp(group);
-    } catch (InterruptedException e) {
-      log.error("Exception", e);
-      CommandLineUtil.printHelp(group);
-    } catch (ClassNotFoundException e) {
+    } catch (OptionException | InterruptedException | ClassNotFoundException e) {
       log.error("Exception", e);
       CommandLineUtil.printHelp(group);
     }
@@ -182,7 +176,7 @@ public final class WikipediaToSequenceFile {
     }
     
     Stringifier<Set<String>> setStringifier =
-        new DefaultStringifier<Set<String>>(conf, GenericsUtil.getClass(categories));
+        new DefaultStringifier<>(conf, GenericsUtil.getClass(categories));
     
     String categoriesStr = setStringifier.toString(categories);    
     conf.set("wikipedia.categories", categoriesStr);

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/integration/src/main/java/org/apache/mahout/utils/MatrixDumper.java
----------------------------------------------------------------------
diff --git a/integration/src/main/java/org/apache/mahout/utils/MatrixDumper.java b/integration/src/main/java/org/apache/mahout/utils/MatrixDumper.java
index 67cace4..8ab57be 100644
--- a/integration/src/main/java/org/apache/mahout/utils/MatrixDumper.java
+++ b/integration/src/main/java/org/apache/mahout/utils/MatrixDumper.java
@@ -68,7 +68,7 @@ public final class MatrixDumper extends AbstractJob {
   
   private static void exportCSV(Path inputPath, String outputFile, boolean doLabels) throws IOException {
     SequenceFileValueIterator<MatrixWritable> it =
-        new SequenceFileValueIterator<MatrixWritable>(inputPath, true, new Configuration());
+        new SequenceFileValueIterator<>(inputPath, true, new Configuration());
     Matrix m = it.next().get();
     it.close();
     PrintStream ps = getPrintStream(outputFile);

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/integration/src/main/java/org/apache/mahout/utils/SequenceFileDumper.java
----------------------------------------------------------------------
diff --git a/integration/src/main/java/org/apache/mahout/utils/SequenceFileDumper.java b/integration/src/main/java/org/apache/mahout/utils/SequenceFileDumper.java
index a337be8..bf6691f 100644
--- a/integration/src/main/java/org/apache/mahout/utils/SequenceFileDumper.java
+++ b/integration/src/main/java/org/apache/mahout/utils/SequenceFileDumper.java
@@ -17,20 +17,18 @@
 
 package org.apache.mahout.utils;
 
-import java.io.File;
-import java.io.OutputStreamWriter;
-import java.io.Writer;
-import java.util.List;
-
 import com.google.common.base.Charsets;
 import com.google.common.collect.Lists;
 import com.google.common.io.Closeables;
 import com.google.common.io.Files;
+import java.io.File;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.util.List;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Writable;
 import org.apache.mahout.common.AbstractJob;
 import org.apache.mahout.common.Pair;
 import org.apache.mahout.common.iterator.sequencefile.PathFilters;
@@ -64,7 +62,7 @@ public final class SequenceFileDumper extends AbstractJob {
     Configuration conf = new Configuration();
     Path input = getInputPath();
     FileSystem fs = input.getFileSystem(conf);
-    if (fs.getFileStatus(input).isDir()) {
+    if (fs.getFileStatus(input).isDirectory()) {
       pathArr = FileUtil.stat2Paths(fs.listStatus(input, PathFilters.logsCRCFilter()));
     } else {
       pathArr = new Path[1];
@@ -92,14 +90,14 @@ public final class SequenceFileDumper extends AbstractJob {
           sub = Integer.parseInt(getOption("substring"));
         }
         boolean countOnly = hasOption("count");
-        SequenceFileIterator<?, ?> iterator = new SequenceFileIterator<Writable, Writable>(path, true, conf);
+        SequenceFileIterator<?, ?> iterator = new SequenceFileIterator<>(path, true, conf);
         if (!hasOption("quiet")) {
           writer.append("Key class: ").append(iterator.getKeyClass().toString());
           writer.append(" Value Class: ").append(iterator.getValueClass().toString()).append('\n');
         }
         OpenObjectIntHashMap<String> facets = null;
         if (hasOption("facets")) {
-          facets = new OpenObjectIntHashMap<String>();
+          facets = new OpenObjectIntHashMap<>();
         }
         long count = 0;
         if (countOnly) {

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/integration/src/main/java/org/apache/mahout/utils/SplitInput.java
----------------------------------------------------------------------
diff --git a/integration/src/main/java/org/apache/mahout/utils/SplitInput.java b/integration/src/main/java/org/apache/mahout/utils/SplitInput.java
index 83ff3c6..834d5cd 100644
--- a/integration/src/main/java/org/apache/mahout/utils/SplitInput.java
+++ b/integration/src/main/java/org/apache/mahout/utils/SplitInput.java
@@ -289,7 +289,7 @@ public class SplitInput extends AbstractJob {
     if (fs.getFileStatus(inputDir) == null) {
       throw new IOException(inputDir + " does not exist");
     }
-    if (!fs.getFileStatus(inputDir).isDir()) {
+    if (!fs.getFileStatus(inputDir).isDirectory()) {
       throw new IOException(inputDir + " is not a directory");
     }
 
@@ -300,7 +300,7 @@ public class SplitInput extends AbstractJob {
       // input dir contains one file per category.
       FileStatus[] fileStats = fs.listStatus(inputDir, PathFilters.logsCRCFilter());
       for (FileStatus inputFile : fileStats) {
-        if (!inputFile.isDir()) {
+        if (!inputFile.isDirectory()) {
           splitFile(inputFile.getPath());
         }
       }
@@ -317,7 +317,7 @@ public class SplitInput extends AbstractJob {
     if (fs.getFileStatus(inputFile) == null) {
       throw new IOException(inputFile + " does not exist");
     }
-    if (fs.getFileStatus(inputFile).isDir()) {
+    if (fs.getFileStatus(inputFile).isDirectory()) {
       throw new IOException(inputFile + " is a directory");
     }
 
@@ -419,7 +419,7 @@ public class SplitInput extends AbstractJob {
       }
     } else {
       SequenceFileIterator<Writable, Writable> iterator =
-              new SequenceFileIterator<Writable, Writable>(inputFile, false, fs.getConf());
+              new SequenceFileIterator<>(inputFile, false, fs.getConf());
       SequenceFile.Writer trainingWriter = SequenceFile.createWriter(fs, fs.getConf(), trainingOutputFile,
           iterator.getKeyClass(), iterator.getValueClass());
       SequenceFile.Writer testWriter = SequenceFile.createWriter(fs, fs.getConf(), testOutputFile,
@@ -650,10 +650,10 @@ public class SplitInput extends AbstractJob {
       Configuration conf = getConf();
       FileSystem fs = trainingOutputDirectory.getFileSystem(conf);
       FileStatus trainingOutputDirStatus = fs.getFileStatus(trainingOutputDirectory);
-      Preconditions.checkArgument(trainingOutputDirStatus != null && trainingOutputDirStatus.isDir(),
+      Preconditions.checkArgument(trainingOutputDirStatus != null && trainingOutputDirStatus.isDirectory(),
           "%s is not a directory", trainingOutputDirectory);
       FileStatus testOutputDirStatus = fs.getFileStatus(testOutputDirectory);
-      Preconditions.checkArgument(testOutputDirStatus != null && testOutputDirStatus.isDir(),
+      Preconditions.checkArgument(testOutputDirStatus != null && testOutputDirStatus.isDirectory(),
           "%s is not a directory", testOutputDirectory);
     }
   }

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/integration/src/main/java/org/apache/mahout/utils/SplitInputJob.java
----------------------------------------------------------------------
diff --git a/integration/src/main/java/org/apache/mahout/utils/SplitInputJob.java b/integration/src/main/java/org/apache/mahout/utils/SplitInputJob.java
index ad8c977..825f7a5 100644
--- a/integration/src/main/java/org/apache/mahout/utils/SplitInputJob.java
+++ b/integration/src/main/java/org/apache/mahout/utils/SplitInputJob.java
@@ -87,7 +87,7 @@ public final class SplitInputJob {
     FileSystem fs = FileSystem.get(initialConf);
 
     SequenceFileDirIterator<? extends WritableComparable, Writable> iterator =
-        new SequenceFileDirIterator<WritableComparable, Writable>(inputPath,
+        new SequenceFileDirIterator<>(inputPath,
             PathType.LIST, PathFilters.partFilter(), null, false, fs.getConf());
     Class<? extends WritableComparable> keyClass;
     Class<? extends Writable> valueClass;

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/integration/src/main/java/org/apache/mahout/utils/clustering/AbstractClusterWriter.java
----------------------------------------------------------------------
diff --git a/integration/src/main/java/org/apache/mahout/utils/clustering/AbstractClusterWriter.java b/integration/src/main/java/org/apache/mahout/utils/clustering/AbstractClusterWriter.java
index e654891..ac884d0 100644
--- a/integration/src/main/java/org/apache/mahout/utils/clustering/AbstractClusterWriter.java
+++ b/integration/src/main/java/org/apache/mahout/utils/clustering/AbstractClusterWriter.java
@@ -142,7 +142,7 @@ public abstract class AbstractClusterWriter implements ClusterWriter {
         log.error("Dictionary entry missing for {}", index);
         continue;
       }
-      topTerms.add(new Pair<String, Double>(dictTerm, vectorTerms.get(i).weight));
+      topTerms.add(new Pair<>(dictTerm, vectorTerms.get(i).weight));
     }
 
     return topTerms;

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/integration/src/main/java/org/apache/mahout/utils/vectors/VectorDumper.java
----------------------------------------------------------------------
diff --git a/integration/src/main/java/org/apache/mahout/utils/vectors/VectorDumper.java b/integration/src/main/java/org/apache/mahout/utils/vectors/VectorDumper.java
index 51a22fb..2a8a42b 100644
--- a/integration/src/main/java/org/apache/mahout/utils/vectors/VectorDumper.java
+++ b/integration/src/main/java/org/apache/mahout/utils/vectors/VectorDumper.java
@@ -97,7 +97,7 @@ public final class VectorDumper extends AbstractJob {
     FileSystem fs = FileSystem.get(conf);
     Path input = getInputPath();
     FileStatus fileStatus = fs.getFileStatus(input);
-    if (fileStatus.isDir()) {
+    if (fileStatus.isDirectory()) {
       pathArr = FileUtil.stat2Paths(fs.listStatus(input, PathFilters.logsCRCFilter()));
     } else {
       FileStatus[] inputPaths = fs.globStatus(input);
@@ -186,8 +186,7 @@ public final class VectorDumper extends AbstractJob {
         if (quiet) {
           log.info("Processing file '{}' ({}/{})", path, ++fileCount, pathArr.length);
         }
-        SequenceFileIterable<Writable, Writable> iterable =
-                new SequenceFileIterable<Writable, Writable>(path, true, conf);
+        SequenceFileIterable<Writable, Writable> iterable = new SequenceFileIterable<>(path, true, conf);
         Iterator<Pair<Writable, Writable>> iterator = iterable.iterator();
         long i = 0;
         while (iterator.hasNext() && (numItems == null || itemCount < numItems)) {

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/integration/src/main/java/org/apache/mahout/utils/vectors/VectorHelper.java
----------------------------------------------------------------------
diff --git a/integration/src/main/java/org/apache/mahout/utils/vectors/VectorHelper.java b/integration/src/main/java/org/apache/mahout/utils/vectors/VectorHelper.java
index 00fbf33..29b02aa 100644
--- a/integration/src/main/java/org/apache/mahout/utils/vectors/VectorHelper.java
+++ b/integration/src/main/java/org/apache/mahout/utils/vectors/VectorHelper.java
@@ -91,7 +91,7 @@ public final class VectorHelper {
       maxEntries = sizeOfNonZeroElementsInVector;
     }
 
-    PriorityQueue<Pair<Integer, Double>> queue = new TDoublePQ<Integer>(-1, maxEntries);
+    PriorityQueue<Pair<Integer, Double>> queue = new TDoublePQ<>(-1, maxEntries);
     for (Element e : vector.nonZeroes()) {
       queue.insertWithOverflow(Pair.of(e.index(), e.get()));
     }
@@ -177,11 +177,8 @@ public final class VectorHelper {
    * </pre>
    */
   public static String[] loadTermDictionary(File dictFile) throws IOException {
-    InputStream in = new FileInputStream(dictFile);
-    try {
+    try (InputStream in = new FileInputStream(dictFile)) {
       return loadTermDictionary(in);
-    } finally {
-      in.close();
     }
   }
 
@@ -192,7 +189,7 @@ public final class VectorHelper {
    * @param filePattern <PATH TO DICTIONARY>/dictionary.file-*
    */
   public static String[] loadTermDictionary(Configuration conf, String filePattern) {
-    OpenObjectIntHashMap<String> dict = new OpenObjectIntHashMap<String>();
+    OpenObjectIntHashMap<String> dict = new OpenObjectIntHashMap<>();
     int maxIndexValue = 0;
     for (Pair<Text, IntWritable> record
         : new SequenceFileDirIterable<Text, IntWritable>(new Path(filePattern), PathType.GLOB, null, null, true,

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/integration/src/main/java/org/apache/mahout/utils/vectors/lucene/ClusterLabels.java
----------------------------------------------------------------------
diff --git a/integration/src/main/java/org/apache/mahout/utils/vectors/lucene/ClusterLabels.java b/integration/src/main/java/org/apache/mahout/utils/vectors/lucene/ClusterLabels.java
index 8fe4f65..06bec60 100644
--- a/integration/src/main/java/org/apache/mahout/utils/vectors/lucene/ClusterLabels.java
+++ b/integration/src/main/java/org/apache/mahout/utils/vectors/lucene/ClusterLabels.java
@@ -187,7 +187,7 @@ public class ClusterLabels {
      */
     Terms t = MultiFields.getTerms(reader, contentField);
     TermsEnum te = t.iterator(null);
-    Map<String, TermEntry> termEntryMap = new LinkedHashMap<String, TermEntry>();
+    Map<String, TermEntry> termEntryMap = new LinkedHashMap<>();
     Bits liveDocs = MultiFields.getLiveDocs(reader); //WARNING: returns null if there are no deletions
 
 
@@ -246,7 +246,7 @@ public class ClusterLabels {
     
     Set<String>  idFieldSelector = null;
     if (idField != null) {
-      idFieldSelector = new TreeSet<String>();
+      idFieldSelector = new TreeSet<>();
       idFieldSelector.add(idField);
     }
     

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/integration/src/test/java/org/apache/mahout/text/LuceneSegmentRecordReaderTest.java
----------------------------------------------------------------------
diff --git a/integration/src/test/java/org/apache/mahout/text/LuceneSegmentRecordReaderTest.java b/integration/src/test/java/org/apache/mahout/text/LuceneSegmentRecordReaderTest.java
index 6d0906b..28f2ac8 100644
--- a/integration/src/test/java/org/apache/mahout/text/LuceneSegmentRecordReaderTest.java
+++ b/integration/src/test/java/org/apache/mahout/text/LuceneSegmentRecordReaderTest.java
@@ -43,7 +43,8 @@ public class LuceneSegmentRecordReaderTest extends AbstractLuceneStorageTest {
 
   @Before
   public void before() throws IOException, InterruptedException {
-    LuceneStorageConfiguration lucene2SeqConf = new LuceneStorageConfiguration(getConfiguration(), asList(getIndexPath1()), new Path("output"), ID_FIELD, asList(FIELD));
+    LuceneStorageConfiguration lucene2SeqConf = new LuceneStorageConfiguration(getConfiguration(),
+        asList(getIndexPath1()), new Path("output"), ID_FIELD, asList(FIELD));
     configuration = lucene2SeqConf.serialize();
     recordReader = new LuceneSegmentRecordReader();
     commitDocuments(getDirectory(getIndexPath1AsFile()), docs.subList(0, 500));
@@ -61,13 +62,17 @@ public class LuceneSegmentRecordReaderTest extends AbstractLuceneStorageTest {
   public void testKey() throws Exception {
     for (SegmentCommitInfo segmentInfo : segmentInfos) {
       int docId = 0;
-      LuceneSegmentInputSplit inputSplit = new LuceneSegmentInputSplit(getIndexPath1(), segmentInfo.info.name, segmentInfo.sizeInBytes());
+      LuceneSegmentInputSplit inputSplit = new LuceneSegmentInputSplit(getIndexPath1(),
+          segmentInfo.info.name, segmentInfo.sizeInBytes());
       TaskAttemptContext context = getTaskAttemptContext(configuration, new TaskAttemptID());
       recordReader.initialize(inputSplit, context);
       for (int i = 0; i < 500; i++){
         recordReader.nextKeyValue();
-        //we can't be sure of the order we are getting the segments, so we have to fudge here a bit on the id, but it is either id: i or i + 500
-        assertTrue("i = " + i + " docId= " + docId, String.valueOf(docId).equals(recordReader.getCurrentKey().toString()) || String.valueOf(docId+500).equals(recordReader.getCurrentKey().toString()));
+        //we can't be sure of the order we are getting the segments, so we have to fudge here a bit on the id,
+        // but it is either id: i or i + 500
+        assertTrue("i = " + i + " docId= " +
+            docId, String.valueOf(docId).equals(recordReader.getCurrentKey().toString()) ||
+            String.valueOf(docId+500).equals(recordReader.getCurrentKey().toString()));
         assertEquals(NullWritable.get(), recordReader.getCurrentValue());
         docId++;
       }
@@ -76,18 +81,22 @@ public class LuceneSegmentRecordReaderTest extends AbstractLuceneStorageTest {
 
   @Test(expected = IllegalArgumentException.class)
   public void testNonExistingIdField() throws Exception {
-    configuration = new LuceneStorageConfiguration(getConfiguration(), asList(getIndexPath1()), new Path("output"), "nonExistingId", asList(FIELD)).serialize();
+    configuration = new LuceneStorageConfiguration(getConfiguration(),
+        asList(getIndexPath1()), new Path("output"), "nonExistingId", asList(FIELD)).serialize();
     SegmentCommitInfo segmentInfo = segmentInfos.iterator().next();
-    LuceneSegmentInputSplit inputSplit = new LuceneSegmentInputSplit(getIndexPath1(), segmentInfo.info.name, segmentInfo.sizeInBytes());
+    LuceneSegmentInputSplit inputSplit = new LuceneSegmentInputSplit(getIndexPath1(),
+        segmentInfo.info.name, segmentInfo.sizeInBytes());
     TaskAttemptContext context = getTaskAttemptContext(configuration, new TaskAttemptID());
     recordReader.initialize(inputSplit, context);
   }
 
   @Test(expected = IllegalArgumentException.class)
   public void testNonExistingField() throws Exception {
-    configuration = new LuceneStorageConfiguration(getConfiguration(), asList(getIndexPath1()), new Path("output"), ID_FIELD, asList("nonExistingField")).serialize();
+    configuration = new LuceneStorageConfiguration(getConfiguration(), asList(getIndexPath1()),
+        new Path("output"), ID_FIELD, asList("nonExistingField")).serialize();
     SegmentCommitInfo segmentInfo = segmentInfos.iterator().next();
-    LuceneSegmentInputSplit inputSplit = new LuceneSegmentInputSplit(getIndexPath1(), segmentInfo.info.name, segmentInfo.sizeInBytes());
+    LuceneSegmentInputSplit inputSplit = new LuceneSegmentInputSplit(getIndexPath1(),
+        segmentInfo.info.name, segmentInfo.sizeInBytes());
     TaskAttemptContext context = getTaskAttemptContext(configuration, new TaskAttemptID());
     recordReader.initialize(inputSplit, context);
   }

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/integration/src/test/java/org/apache/mahout/text/TestSequenceFilesFromDirectory.java
----------------------------------------------------------------------
diff --git a/integration/src/test/java/org/apache/mahout/text/TestSequenceFilesFromDirectory.java b/integration/src/test/java/org/apache/mahout/text/TestSequenceFilesFromDirectory.java
index ab197c2..aec5e39 100644
--- a/integration/src/test/java/org/apache/mahout/text/TestSequenceFilesFromDirectory.java
+++ b/integration/src/test/java/org/apache/mahout/text/TestSequenceFilesFromDirectory.java
@@ -219,7 +219,7 @@ public final class TestSequenceFilesFromDirectory extends MahoutTestCase {
 
     // read a chunk to check content
     SequenceFileIterator<Text, Text> iterator =
-      new SequenceFileIterator<Text, Text>(fileStatuses[0].getPath(), true, configuration);
+      new SequenceFileIterator<>(fileStatuses[0].getPath(), true, configuration);
     try {
       while (iterator.hasNext()) {
         Pair<Text, Text> record = iterator.next();
@@ -254,7 +254,8 @@ public final class TestSequenceFilesFromDirectory extends MahoutTestCase {
     }
 
     // read a chunk to check content
-    SequenceFileIterator<Text, Text> iterator = new SequenceFileIterator<Text, Text>(fileStatuses[0].getPath(), true, configuration);
+    SequenceFileIterator<Text, Text> iterator =
+        new SequenceFileIterator<>(fileStatuses[0].getPath(), true, configuration);
     try {
       while (iterator.hasNext()) {
         Pair<Text, Text> record = iterator.next();
@@ -285,7 +286,7 @@ public final class TestSequenceFilesFromDirectory extends MahoutTestCase {
     }
 
     // read a chunk to check content
-    SequenceFileIterator<Text, Text> iterator = new SequenceFileIterator<Text, Text>(
+    SequenceFileIterator<Text, Text> iterator = new SequenceFileIterator<>(
       fileStatuses[0].getPath(), true, conf);
     try {
       while (iterator.hasNext()) {
@@ -318,7 +319,7 @@ public final class TestSequenceFilesFromDirectory extends MahoutTestCase {
     }
 
     // read a chunk to check content
-    SequenceFileIterator<Text, Text> iterator = new SequenceFileIterator<Text, Text>(
+    SequenceFileIterator<Text, Text> iterator = new SequenceFileIterator<>(
       fileStatuses[0].getPath(), true, configuration);
     try {
       while (iterator.hasNext()) {

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/integration/src/test/java/org/apache/mahout/utils/TestConcatenateVectorsJob.java
----------------------------------------------------------------------
diff --git a/integration/src/test/java/org/apache/mahout/utils/TestConcatenateVectorsJob.java b/integration/src/test/java/org/apache/mahout/utils/TestConcatenateVectorsJob.java
index c0fcb89..329b590 100644
--- a/integration/src/test/java/org/apache/mahout/utils/TestConcatenateVectorsJob.java
+++ b/integration/src/test/java/org/apache/mahout/utils/TestConcatenateVectorsJob.java
@@ -57,7 +57,7 @@ public class TestConcatenateVectorsJob extends MahoutTestCase {
     // Yes, all of this generic rigmarole is needed, and woe betide he who changes it
     ConcatenateVectorsReducer reducer = new ConcatenateVectorsReducer();
 
-    DummyRecordWriter<IntWritable, VectorWritable> recordWriter = new DummyRecordWriter<IntWritable, VectorWritable>();
+    DummyRecordWriter<IntWritable, VectorWritable> recordWriter = new DummyRecordWriter<>();
 
     Reducer<IntWritable, VectorWritable, IntWritable, VectorWritable>.Context reduceContext =
       DummyRecordWriter.build(reducer, configuration, recordWriter, IntWritable.class, VectorWritable.class);

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/mrlegacy/src/main/java/org/apache/mahout/cf/taste/common/Refreshable.java
----------------------------------------------------------------------
diff --git a/mrlegacy/src/main/java/org/apache/mahout/cf/taste/common/Refreshable.java b/mrlegacy/src/main/java/org/apache/mahout/cf/taste/common/Refreshable.java
index b4fe9ca..9b26bee 100644
--- a/mrlegacy/src/main/java/org/apache/mahout/cf/taste/common/Refreshable.java
+++ b/mrlegacy/src/main/java/org/apache/mahout/cf/taste/common/Refreshable.java
@@ -44,7 +44,7 @@ public interface Refreshable {
    * 
    * @param alreadyRefreshed
    *          {@link org.apache.mahout.cf.taste.common.Refreshable}s that are known to have already been
-   *          refreshed as a result of an initial call to a {@link #refresh(Collection)} method on some
+   *          refreshed as a result of an initial call to a {@link #refresh(Collection)} method on some
    *          object. This ensure that objects in a refresh dependency graph aren't refreshed twice
    *          needlessly.
    */

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/als/ALS.java
----------------------------------------------------------------------
diff --git a/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/als/ALS.java b/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/als/ALS.java
index 3ad5c73..c5ccf38 100644
--- a/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/als/ALS.java
+++ b/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/als/ALS.java
@@ -35,7 +35,6 @@ import org.apache.mahout.common.iterator.sequencefile.SequenceFileDirValueIterat
 import org.apache.mahout.math.Vector;
 import org.apache.mahout.math.VectorWritable;
 import org.apache.mahout.math.als.AlternatingLeastSquaresSolver;
-import org.apache.mahout.math.function.Functions;
 import org.apache.mahout.math.map.OpenIntObjectHashMap;
 
 import java.io.IOException;
@@ -47,7 +46,7 @@ final class ALS {
   private ALS() {}
 
   static Vector readFirstRow(Path dir, Configuration conf) throws IOException {
-    Iterator<VectorWritable> iterator = new SequenceFileDirValueIterator<VectorWritable>(dir, PathType.LIST,
+    Iterator<VectorWritable> iterator = new SequenceFileDirValueIterator<>(dir, PathType.LIST,
         PathFilters.partFilter(), null, true, conf);
     return iterator.hasNext() ? iterator.next().get() : null;
   }
@@ -83,7 +82,7 @@ final class ALS {
   }
 
   public static OpenIntObjectHashMap<Vector> readMatrixByRows(Path dir, Configuration conf) {
-    OpenIntObjectHashMap<Vector> matrix = new OpenIntObjectHashMap<Vector>();
+    OpenIntObjectHashMap<Vector> matrix = new OpenIntObjectHashMap<>();
     for (Pair<IntWritable,VectorWritable> pair
         : new SequenceFileDirIterable<IntWritable,VectorWritable>(dir, PathType.LIST, PathFilters.partFilter(), conf)) {
       int rowIndex = pair.getFirst().get();

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/als/ParallelALSFactorizationJob.java
----------------------------------------------------------------------
diff --git a/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/als/ParallelALSFactorizationJob.java b/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/als/ParallelALSFactorizationJob.java
index 5b42353..624a8c4 100644
--- a/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/als/ParallelALSFactorizationJob.java
+++ b/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/als/ParallelALSFactorizationJob.java
@@ -102,10 +102,6 @@ public class ParallelALSFactorizationJob extends AbstractJob {
   private double lambda;
   private double alpha;
   private int numThreadsPerSolver;
-  private boolean usesLongIDs;
-
-  private int numItems;
-  private int numUsers;
 
   enum Stats { NUM_USERS }
 
@@ -138,7 +134,7 @@ public class ParallelALSFactorizationJob extends AbstractJob {
     implicitFeedback = Boolean.parseBoolean(getOption("implicitFeedback"));
 
     numThreadsPerSolver = Integer.parseInt(getOption("numThreadsPerSolver"));
-    usesLongIDs = Boolean.parseBoolean(getOption("usesLongIDs", String.valueOf(false)));
+    boolean usesLongIDs = Boolean.parseBoolean(getOption("usesLongIDs", String.valueOf(false)));
 
     /*
     * compute the factorization A = U M'
@@ -196,8 +192,8 @@ public class ParallelALSFactorizationJob extends AbstractJob {
 
     Vector averageRatings = ALS.readFirstRow(getTempPath("averageRatings"), getConf());
 
-    numItems = averageRatings.getNumNondefaultElements();
-    numUsers = (int) userRatings.getCounters().findCounter(Stats.NUM_USERS).getValue();
+    int numItems = averageRatings.getNumNondefaultElements();
+    int numUsers = (int) userRatings.getCounters().findCounter(Stats.NUM_USERS).getValue();
 
     log.info("Found {} users and {} items", numUsers, numItems);
 
@@ -208,11 +204,11 @@ public class ParallelALSFactorizationJob extends AbstractJob {
       /* broadcast M, read A row-wise, recompute U row-wise */
       log.info("Recomputing U (iteration {}/{})", currentIteration, numIterations);
       runSolver(pathToUserRatings(), pathToU(currentIteration), pathToM(currentIteration - 1), currentIteration, "U",
-                numItems);
+          numItems);
       /* broadcast U, read A' row-wise, recompute M row-wise */
       log.info("Recomputing M (iteration {}/{})", currentIteration, numIterations);
       runSolver(pathToItemRatings(), pathToM(currentIteration), pathToU(currentIteration), currentIteration, "M",
-                numUsers);
+          numUsers);
     }
 
     return 0;

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/als/PredictionMapper.java
----------------------------------------------------------------------
diff --git a/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/als/PredictionMapper.java b/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/als/PredictionMapper.java
index 6bcb500..6e7ea81 100644
--- a/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/als/PredictionMapper.java
+++ b/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/als/PredictionMapper.java
@@ -63,7 +63,7 @@ public class PredictionMapper extends SharingMapper<IntWritable,VectorWritable,L
     OpenIntObjectHashMap<Vector> U = ALS.readMatrixByRows(pathToU, conf);
     OpenIntObjectHashMap<Vector> M = ALS.readMatrixByRows(pathToM, conf);
 
-    return new Pair<OpenIntObjectHashMap<Vector>, OpenIntObjectHashMap<Vector>>(U, M);
+    return new Pair<>(U, M);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/item/AggregateAndRecommendReducer.java
----------------------------------------------------------------------
diff --git a/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/item/AggregateAndRecommendReducer.java b/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/item/AggregateAndRecommendReducer.java
index 13bfbeb..b44fd5b 100644
--- a/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/item/AggregateAndRecommendReducer.java
+++ b/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/item/AggregateAndRecommendReducer.java
@@ -212,10 +212,8 @@ public final class AggregateAndRecommendReducer extends
       return true;
     } else if (itemsForUser != null) {
       return itemsForUser.contains(itemID);
-    } else if (allItemsToRecommendFor != null) {
-      return allItemsToRecommendFor.contains(itemID);
     } else {
-      return false;
+      return allItemsToRecommendFor.contains(itemID);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/item/IDReader.java
----------------------------------------------------------------------
diff --git a/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/item/IDReader.java b/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/item/IDReader.java
index 9dce435..b8cf6bb 100644
--- a/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/item/IDReader.java
+++ b/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/item/IDReader.java
@@ -17,13 +17,12 @@
 
 package org.apache.mahout.cf.taste.hadoop.item;
 
+import com.google.common.collect.Maps;
+import com.google.common.io.Closeables;
 import java.io.IOException;
 import java.io.InputStream;
-import java.util.HashMap;
 import java.util.Map;
 import java.util.regex.Pattern;
-
-import com.google.common.collect.Maps;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.mahout.cf.taste.impl.common.FastIDSet;
@@ -32,8 +31,6 @@ import org.apache.mahout.common.iterator.FileLineIterable;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.io.Closeables;
-
 /**
  * Reads user ids and item ids from files specified in usersFile, itemsFile or userItemFile options in item-based
  *  recommender. Composes a list of users and a list of items which can be used by
@@ -118,7 +115,7 @@ public class IDReader {
     } else if (isUserItemFileSpecified() && !isUsersFileSpecified()) {
       readUserItemFilterIfNeeded();
       userIds = extractAllUserIdsFromUserItemFilter(userItemFilter);
-    } else if (!isUsersFileSpecified() && !isUserItemFileSpecified()) {
+    } else if (!isUsersFileSpecified()) {
       throw new IllegalStateException("Neither usersFile nor userItemFile options are specified");
     } else {
       throw new IllegalStateException("usersFile and userItemFile options cannot be used simultaneously");
@@ -131,7 +128,7 @@ public class IDReader {
     } else if (isUserItemFileSpecified() && !isItemsFileSpecified()) {
       readUserItemFilterIfNeeded();
       itemIds = extractAllItemIdsFromUserItemFilter(userItemFilter);
-    } else if (!isItemsFileSpecified() && !isUserItemFileSpecified()) {
+    } else if (!isItemsFileSpecified()) {
       throw new IllegalStateException("Neither itemsFile nor userItemFile options are specified");
     } else {
       throw new IllegalStateException("itemsFile and userItemFile options cannot be specified simultaneously");
@@ -151,7 +148,7 @@ public class IDReader {
     try {
       for (String line : new FileLineIterable(in)) {
         try {
-          String[] tokens = SEPARATOR.split(line.toString());
+          String[] tokens = SEPARATOR.split(line);
           Long userId = Long.parseLong(tokens[0]);
           Long itemId = Long.parseLong(tokens[1]);
 

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/item/UserVectorSplitterMapper.java
----------------------------------------------------------------------
diff --git a/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/item/UserVectorSplitterMapper.java b/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/item/UserVectorSplitterMapper.java
index 66a412d..2290d06 100644
--- a/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/item/UserVectorSplitterMapper.java
+++ b/mrlegacy/src/main/java/org/apache/mahout/cf/taste/hadoop/item/UserVectorSplitterMapper.java
@@ -62,6 +62,9 @@ public final class UserVectorSplitterMapper extends
                      VectorWritable value,
                      Context context) throws IOException, InterruptedException {
     long userID = key.get();
+
+    log.info("UserID = {}", userID);
+
     if (usersToRecommendFor != null && !usersToRecommendFor.contains(userID)) {
       return;
     }

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/eval/AbstractDifferenceRecommenderEvaluator.java
----------------------------------------------------------------------
diff --git a/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/eval/AbstractDifferenceRecommenderEvaluator.java b/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/eval/AbstractDifferenceRecommenderEvaluator.java
index b5d2f85..f6598f3 100644
--- a/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/eval/AbstractDifferenceRecommenderEvaluator.java
+++ b/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/eval/AbstractDifferenceRecommenderEvaluator.java
@@ -106,9 +106,9 @@ public abstract class AbstractDifferenceRecommenderEvaluator implements Recommen
     log.info("Beginning evaluation using {} of {}", trainingPercentage, dataModel);
     
     int numUsers = dataModel.getNumUsers();
-    FastByIDMap<PreferenceArray> trainingPrefs = new FastByIDMap<PreferenceArray>(
+    FastByIDMap<PreferenceArray> trainingPrefs = new FastByIDMap<>(
         1 + (int) (evaluationPercentage * numUsers));
-    FastByIDMap<PreferenceArray> testPrefs = new FastByIDMap<PreferenceArray>(
+    FastByIDMap<PreferenceArray> testPrefs = new FastByIDMap<>(
         1 + (int) (evaluationPercentage * numUsers));
     
     LongPrimitiveIterator it = dataModel.getUserIDs();

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/eval/GenericRecommenderIRStatsEvaluator.java
----------------------------------------------------------------------
diff --git a/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/eval/GenericRecommenderIRStatsEvaluator.java b/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/eval/GenericRecommenderIRStatsEvaluator.java
index e29125d..0e121d1 100644
--- a/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/eval/GenericRecommenderIRStatsEvaluator.java
+++ b/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/eval/GenericRecommenderIRStatsEvaluator.java
@@ -127,7 +127,7 @@ public final class GenericRecommenderIRStatsEvaluator implements RecommenderIRSt
         continue;
       }
 
-      FastByIDMap<PreferenceArray> trainingUsers = new FastByIDMap<PreferenceArray>(dataModel.getNumUsers());
+      FastByIDMap<PreferenceArray> trainingUsers = new FastByIDMap<>(dataModel.getNumUsers());
       LongPrimitiveIterator it2 = dataModel.getUserIDs();
       while (it2.hasNext()) {
         dataSplitter.processOtherUser(userID, relevantItemIDs, trainingUsers, it2.nextLong(), dataModel);

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/model/GenericBooleanPrefDataModel.java
----------------------------------------------------------------------
diff --git a/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/model/GenericBooleanPrefDataModel.java b/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/model/GenericBooleanPrefDataModel.java
index 43ccb4e..2c1ff4d 100644
--- a/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/model/GenericBooleanPrefDataModel.java
+++ b/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/model/GenericBooleanPrefDataModel.java
@@ -74,7 +74,7 @@ public final class GenericBooleanPrefDataModel extends AbstractDataModel {
     Preconditions.checkArgument(userData != null, "userData is null");
 
     this.preferenceFromUsers = userData;
-    this.preferenceForItems = new FastByIDMap<FastIDSet>();
+    this.preferenceForItems = new FastByIDMap<>();
     FastIDSet itemIDSet = new FastIDSet();
     for (Map.Entry<Long, FastIDSet> entry : preferenceFromUsers.entrySet()) {
       long userID = entry.getKey();
@@ -132,7 +132,7 @@ public final class GenericBooleanPrefDataModel extends AbstractDataModel {
    *  that user's associated items
    */
   public static FastByIDMap<FastIDSet> toDataMap(DataModel dataModel) throws TasteException {
-    FastByIDMap<FastIDSet> data = new FastByIDMap<FastIDSet>(dataModel.getNumUsers());
+    FastByIDMap<FastIDSet> data = new FastByIDMap<>(dataModel.getNumUsers());
     LongPrimitiveIterator it = dataModel.getUserIDs();
     while (it.hasNext()) {
       long userID = it.nextLong();

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/model/GenericDataModel.java
----------------------------------------------------------------------
diff --git a/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/model/GenericDataModel.java b/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/model/GenericDataModel.java
index a898d85..f58d349 100644
--- a/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/model/GenericDataModel.java
+++ b/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/model/GenericDataModel.java
@@ -81,7 +81,7 @@ public final class GenericDataModel extends AbstractDataModel {
     Preconditions.checkArgument(userData != null, "userData is null");
 
     this.preferenceFromUsers = userData;
-    FastByIDMap<Collection<Preference>> prefsForItems = new FastByIDMap<Collection<Preference>>();
+    FastByIDMap<Collection<Preference>> prefsForItems = new FastByIDMap<>();
     FastIDSet itemIDSet = new FastIDSet();
     int currentCount = 0;
     float maxPrefValue = Float.NEGATIVE_INFINITY;
@@ -174,7 +174,7 @@ public final class GenericDataModel extends AbstractDataModel {
    *  that user's preferences
    */
   public static FastByIDMap<PreferenceArray> toDataMap(DataModel dataModel) throws TasteException {
-    FastByIDMap<PreferenceArray> data = new FastByIDMap<PreferenceArray>(dataModel.getNumUsers());
+    FastByIDMap<PreferenceArray> data = new FastByIDMap<>(dataModel.getNumUsers());
     LongPrimitiveIterator it = dataModel.getUserIDs();
     while (it.hasNext()) {
       long userID = it.nextLong();

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/model/MemoryIDMigrator.java
----------------------------------------------------------------------
diff --git a/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/model/MemoryIDMigrator.java b/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/model/MemoryIDMigrator.java
index d511094..3463ff5 100644
--- a/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/model/MemoryIDMigrator.java
+++ b/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/model/MemoryIDMigrator.java
@@ -28,7 +28,7 @@ public final class MemoryIDMigrator extends AbstractIDMigrator implements Updata
   private final FastByIDMap<String> longToString;
   
   public MemoryIDMigrator() {
-    this.longToString = new FastByIDMap<String>(100);
+    this.longToString = new FastByIDMap<>(100);
   }
   
   @Override

http://git-wip-us.apache.org/repos/asf/mahout/blob/87c15bea/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/model/PlusAnonymousConcurrentUserDataModel.java
----------------------------------------------------------------------
diff --git a/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/model/PlusAnonymousConcurrentUserDataModel.java b/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/model/PlusAnonymousConcurrentUserDataModel.java
index e7ccb40..c97a545 100644
--- a/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/model/PlusAnonymousConcurrentUserDataModel.java
+++ b/mrlegacy/src/main/java/org/apache/mahout/cf/taste/impl/model/PlusAnonymousConcurrentUserDataModel.java
@@ -102,8 +102,8 @@ public final class PlusAnonymousConcurrentUserDataModel extends PlusAnonymousUse
   public PlusAnonymousConcurrentUserDataModel(DataModel delegate, int maxConcurrentUsers) {
     super(delegate);
 
-    tempPrefs = new ConcurrentHashMap<Long, PreferenceArray>();
-    prefItemIDs = new ConcurrentHashMap<Long, FastIDSet>();
+    tempPrefs = new ConcurrentHashMap<>();
+    prefItemIDs = new ConcurrentHashMap<>();
 
     initializeUsersPools(maxConcurrentUsers);
   }
@@ -114,7 +114,7 @@ public final class PlusAnonymousConcurrentUserDataModel extends PlusAnonymousUse
    * @param usersPoolSize Maximum allowed number of concurrent anonymous user. Depends on the consumer system.
    */
   private void initializeUsersPools(int usersPoolSize) {
-    usersPool = new ConcurrentLinkedQueue<Long>();
+    usersPool = new ConcurrentLinkedQueue<>();
     for (int i = 0; i < usersPoolSize; i++) {
       usersPool.add(TEMP_USER_ID + i);
     }